def setUp(self):
    """Stage the GSSHA project, rasters, and shapefile, then load the
    project into an in-memory database for the elevation tests."""
    self.gssha_project_directory = path.join(self.writeDirectory, 'gssha_project')
    self.gssha_project_file = 'grid_standard_basic.prj'
    self.elevation_path = path.join(self.writeDirectory, 'gmted_elevation.tif')
    self.shapefile_path = path.join(self.writeDirectory, 'phillipines_5070115700.shp')
    self.compare_path = path.join(self.readDirectory, 'phillipines', 'compare_data')

    # stage the baseline GSSHA project (best effort: may already exist)
    try:
        copytree(path.join(self.readDirectory, 'phillipines', 'gssha_project'),
                 self.gssha_project_directory)
    except OSError:
        pass

    # stage the elevation raster
    try:
        copy(path.join(self.readDirectory, 'phillipines', 'gmted_elevation.tif'),
             self.elevation_path)
    except OSError:
        pass

    # stage every component of the shapefile (.shp/.shx/.dbf/...)
    pattern = path.join(self.readDirectory, 'phillipines',
                        'phillipines_5070115700.*')
    for part in glob(pattern):
        try:
            copy(part, path.join(self.writeDirectory, path.basename(part)))
        except OSError:
            pass

    # in-memory SQLite database + session for this test
    sqlalchemy_url, sql_engine = dbt.init_sqlite_memory()
    self.db_session = dbt.create_session(sqlalchemy_url, sql_engine)

    # read the GSSHA project into the database
    self.project_manager = ProjectFile(name='grid_standard_ele')
    self.project_manager.readInput(directory=self.gssha_project_directory,
                                   projectFileName=self.gssha_project_file,
                                   session=self.db_session)

    self.ele_file = ElevationGridFile(project_file=self.project_manager,
                                      session=self.db_session)
    chdir(self.gssha_project_directory)
def prepare_max_depth_map(user, result_url, job, depthMapDir, CKAN_engine):
    """Prepare the max depth map for a GSSHA result and store it in CKAN.

    Extracts the result archive at ``result_url`` into ``depthMapDir``,
    reads the GSSHA output files into the database, renders the max depth
    WMS dataset ("gfl") to a KMZ animation, and uploads it to the CKAN
    "depth-maps" dataset.

    Returns a dict with a ``'url'`` key; the URL is an empty string when
    any step of the rendering/upload pipeline fails.
    """
    # Clear the results folder
    clear_folder(depthMapDir)

    # Create gsshapy_session
    gsshapy_session = gsshapy_sessionmaker()

    # Extract the GSSHA result archive into the results folder
    extract_path, unique_dir = extract_zip_from_url(user, result_url, depthMapDir)

    # Find the project file (the last .prj found wins, matching the
    # original behavior when multiple project files are present)
    project_name = None
    for root, dirs, files in os.walk(depthMapDir):
        for file_name in files:  # renamed: `file` shadows a builtin
            if file_name.endswith(".prj"):
                project_name = file_name
                project_path = os.path.join(root, file_name)
                read_dir = os.path.dirname(project_path)
                depth_file = project_path[:-3] + "kmz"
                resource_name = project_name[:-4] + " max depth"

    # No project file in the archive: nothing to render (previously this
    # fell through to a NameError on `project_name`)
    if project_name is None:
        return {'url': ""}

    # Read the output files of the project into the database
    project_file = ProjectFile()
    project_file.readOutput(directory=read_dir,
                            projectFileName=project_name,
                            session=gsshapy_session,
                            spatial=True)

    # Create a kmz animation using the depth map
    try:
        depth_map_raster = gsshapy_session.query(WMSDatasetFile).filter(
            WMSDatasetFile.projectFileID == project_file.id).filter(
            WMSDatasetFile.fileExtension == "gfl").one()
        depth_map_raster.getAsKmlPngAnimation(
            session=gsshapy_session,
            projectFile=project_file,
            path=depth_file,
            colorRamp=ColorRampEnum.COLOR_RAMP_AQUA,
            alpha=0.75,
            cellSize=10)
        depth_raster = check_dataset("depth-maps", CKAN_engine)
        result, status = add_depth_map_CKAN(depth_raster, CKAN_engine,
                                            depth_file, resource_name)
    except Exception:
        # narrowed from a bare `except:`; the upload remains best-effort,
        # so fall back to an empty URL rather than failing the whole job
        result = {'url': ""}
    return result
def test_project_file_read_output(self):
    """
    Test ProjectFile read output method
    """
    # Read the output files into the database through the read session
    read_project = ProjectFile()
    read_project.readOutput(directory=self.directory,
                            projectFileName='standard.prj',
                            session=self.readSession)

    # .one() asserts exactly one ProjectFile row exists in the database
    queried_project = self.querySession.query(ProjectFile).one()
def prepare_max_depth_map(user, result_url, job, depthMapDir, CKAN_engine):
    """Prepare the max depth map for a GSSHA result and store it in CKAN.

    Extracts the result archive at ``result_url`` into ``depthMapDir``,
    reads the GSSHA output files into the database, renders the max depth
    WMS dataset ("gfl") to a KMZ animation, and uploads it to the CKAN
    "depth-maps" dataset.

    Returns a dict with a ``'url'`` key; the URL is an empty string when
    any step of the rendering/upload pipeline fails.
    """
    # Clear the results folder
    clear_folder(depthMapDir)

    # Create gsshapy_session
    gsshapy_session = gsshapy_sessionmaker()

    # Extract the GSSHA result archive into the results folder
    extract_path, unique_dir = extract_zip_from_url(user, result_url, depthMapDir)

    # Find the project file (the last .prj found wins, matching the
    # original behavior when multiple project files are present)
    project_name = None
    for root, dirs, files in os.walk(depthMapDir):
        for file_name in files:  # renamed: `file` shadows a builtin
            if file_name.endswith(".prj"):
                project_name = file_name
                project_path = os.path.join(root, file_name)
                read_dir = os.path.dirname(project_path)
                depth_file = project_path[:-3] + "kmz"
                resource_name = project_name[:-4] + " max depth"

    # No project file in the archive: nothing to render (previously this
    # fell through to a NameError on `project_name`)
    if project_name is None:
        return {'url': ""}

    # Read the output files of the project into the database
    project_file = ProjectFile()
    project_file.readOutput(directory=read_dir,
                            projectFileName=project_name,
                            session=gsshapy_session,
                            spatial=True)

    # Create a kmz animation using the depth map
    try:
        depth_map_raster = gsshapy_session.query(WMSDatasetFile).filter(
            WMSDatasetFile.projectFileID == project_file.id).filter(
            WMSDatasetFile.fileExtension == "gfl").one()
        depth_map_raster.getAsKmlPngAnimation(
            session=gsshapy_session,
            projectFile=project_file,
            path=depth_file,
            colorRamp=ColorRampEnum.COLOR_RAMP_AQUA,
            alpha=0.75,
            cellSize=10)
        depth_raster = check_dataset("depth-maps", CKAN_engine)
        result, status = add_depth_map_CKAN(depth_raster, CKAN_engine,
                                            depth_file, resource_name)
    except Exception:
        # narrowed from a bare `except:`; the upload remains best-effort,
        # so fall back to an empty URL rather than failing the whole job
        result = {'url': ""}
    return result
def setUp(self):
    """Create the test database, stage run subdirectories, and load the
    'standard' project into the database for the write tests.

    Note: the original computed ``dbName = '%s.db' % uuid.uuid4()`` and
    never used it (the path always points at ``standard.db``); that dead
    local has been removed.
    """
    # Find db directory path
    here = os.path.abspath(os.path.dirname(__file__))
    self.db_path = os.path.join(here, 'db', 'standard.db')

    # Create Test DB
    sqlalchemy_url = dbt.init_sqlite_db(self.db_path)

    # Define workspace
    self.readDirectory = os.path.join(here, 'standard')
    self.writeDirectory = os.path.join(here, 'out')
    self.original = 'standard'
    self.name = 'standard'

    # Subdirectories some tests expect to exist under the read directory
    self.dir_list = ('run_2014_to_2017', 'run_2014_to_2017_2',
                     'run_2015_to_2017', 'run_2015_to_2017_1',
                     'run_2016_to_2017')
    for subdir in self.dir_list:
        try:
            os.mkdir(os.path.join(self.readDirectory, subdir))
        except OSError:
            pass  # directory already exists

    # Create DB Sessions
    session_maker = dbt.get_sessionmaker(sqlalchemy_url)
    readSession = session_maker()

    # Read the standard project into the database, then close the
    # read session so the write session starts clean
    prjR = ProjectFile()
    prjR.readProject(directory=self.readDirectory,
                     projectFileName='standard.prj',
                     session=readSession)
    readSession.close()

    # create write session
    self.writeSession = session_maker()
def setUp(self):
    """Stage the watershed shapefile and create an in-memory database
    with a persisted project manager for the mask tests."""
    # work directly out of the scratch directory
    self.gssha_project_directory = self.writeDirectory
    self.shapefile_path = path.join(self.writeDirectory,
                                    'phillipines_5070115700.shp')
    self.compare_path = path.join(self.readDirectory, 'phillipines',
                                  'compare_data')

    # stage every component of the shapefile (.shp/.shx/.dbf/...)
    pattern = path.join(self.readDirectory, 'phillipines',
                        'phillipines_5070115700.*')
    for part in glob(pattern):
        try:
            copy(part, path.join(self.writeDirectory, path.basename(part)))
        except OSError:
            pass  # best effort: the part may already be staged

    # in-memory SQLite database + session for this test
    sqlalchemy_url, sql_engine = dbt.init_sqlite_memory()
    self.db_session = dbt.create_session(sqlalchemy_url, sql_engine)

    # persist the project manager up front so the mask file can attach to it
    self.project_manager = ProjectFile(name="grid_standard_msk", map_type=1)
    self.db_session.add(self.project_manager)
    self.db_session.commit()

    self.msk_file = WatershedMaskFile(project_file=self.project_manager,
                                      session=self.db_session)
    chdir(self.gssha_project_directory)
def test_add_land_cover_map_table(self):
    """
    Tests adding land cover to map table
    """
    roughness_table = path.join(path.dirname(path.realpath(__file__)),
                                '..', 'gsshapy', 'grid', 'land_cover',
                                'land_cover_glcf_modis.txt')
    chdir(self.gssha_project_directory)

    # fresh in-memory database + session
    sqlalchemy_url, sql_engine = dbt.init_sqlite_memory()
    db_session = dbt.create_session(sqlalchemy_url, sql_engine)

    # read the GSSHA project into the database
    project_manager = ProjectFile()
    project_manager.readInput(directory=self.gssha_project_directory,
                              projectFileName=self.gssha_project_file,
                              session=db_session)

    # derive the roughness index map from the land-use grid
    project_manager.mapTableFile.addRoughnessMapFromLandUse("roughness",
                                                            db_session,
                                                            self.land_use_grid,
                                                            roughness_table)

    # WRITE OUT UPDATED GSSHA PROJECT FILE
    project_manager.writeInput(session=db_session,
                               directory=self.gssha_project_directory,
                               name=path.splitext(self.gssha_project_file)[0])
    db_session.close()

    # verify prj/cmt/idx outputs against the land_cover baselines
    for file_name, is_raster in (('grid_standard.prj', False),
                                 ('grid_standard.cmt', False),
                                 ('roughness.idx', True)):
        baseline = path.join(self.readDirectory, 'land_cover', file_name)
        generated = path.join(self.gssha_project_directory, file_name)
        if is_raster:
            self._compare_files(baseline, generated, raster=True)
        else:
            self._compare_files(baseline, generated)
def test_add_land_cover_map_table_twice(self):
    """
    Tests adding land cover to map table run twice
    """
    sqlalchemy_url, sql_engine = dbt.init_sqlite_memory()
    session_maker = dbt.get_sessionmaker(sqlalchemy_url, sql_engine)

    # run twice to ensure uniqueness of the generated map table entries
    for _run in range(2):
        db_session = session_maker()

        # read the GSSHA project into the database
        project_manager = ProjectFile()
        project_manager.readInput(directory=self.gssha_project_directory,
                                  projectFileName=self.gssha_project_file,
                                  session=db_session)

        # derive the roughness index map from the land-use grid
        project_manager.mapTableFile.addRoughnessMapFromLandUse(
            "roughness",
            db_session,
            self.land_use_grid,
            land_use_grid_id='glcf')

        # WRITE OUT UPDATED GSSHA PROJECT FILE
        project_manager.writeInput(session=db_session,
                                   directory=self.gssha_project_directory,
                                   name=path.splitext(self.gssha_project_file)[0])
        db_session.close()

        # verify prj/cmt/idx outputs against the land_cover baselines
        for file_name, is_raster in (('grid_standard.prj', False),
                                     ('grid_standard.cmt', False),
                                     ('roughness.idx', True)):
            baseline = path.join(self.readDirectory, 'land_cover', file_name)
            generated = path.join(self.gssha_project_directory, file_name)
            if is_raster:
                self._compare_files(baseline, generated, raster=True)
            else:
                self._compare_files(baseline, generated)
class TestMask(TestGridTemplate):
    """Tests generating a GSSHA watershed mask grid from a shapefile."""

    def setUp(self):
        """Stage the watershed shapefile and create an in-memory database
        with a persisted project manager."""
        # work directly out of the scratch directory
        self.gssha_project_directory = self.writeDirectory
        self.shapefile_path = path.join(self.writeDirectory,
                                        'phillipines_5070115700.shp')
        self.compare_path = path.join(self.readDirectory, 'phillipines',
                                      'compare_data')

        # stage every component of the shapefile (.shp/.shx/.dbf/...)
        pattern = path.join(self.readDirectory, 'phillipines',
                            'phillipines_5070115700.*')
        for part in glob(pattern):
            try:
                copy(part, path.join(self.writeDirectory, path.basename(part)))
            except OSError:
                pass  # best effort: the part may already be staged

        # in-memory SQLite database + session for this test
        sqlalchemy_url, sql_engine = dbt.init_sqlite_memory()
        self.db_session = dbt.create_session(sqlalchemy_url, sql_engine)

        # persist the project manager up front so the mask file can attach
        self.project_manager = ProjectFile(name="grid_standard_msk", map_type=1)
        self.db_session.add(self.project_manager)
        self.db_session.commit()

        self.msk_file = WatershedMaskFile(project_file=self.project_manager,
                                          session=self.db_session)
        chdir(self.gssha_project_directory)

    def _compare_output(self, project_name):
        """Compare the generated mask, project, and projection files with
        the baseline files in the compare directory."""
        # mask grid (raster comparison)
        mask_name = '{0}.msk'.format(project_name)
        self._compare_files(path.join(self.compare_path, mask_name),
                            path.join(self.writeDirectory, mask_name),
                            raster=True)

        # project file
        prj_name = '{0}.prj'.format(project_name)
        self._compare_files(path.join(self.gssha_project_directory, prj_name),
                            path.join(self.compare_path, prj_name))

        # projection file must have been generated as well
        pro_name = '{0}_prj.pro'.format(project_name)
        self._compare_files(path.join(self.gssha_project_directory, pro_name),
                            path.join(self.compare_path, pro_name))

    def _before_teardown(self):
        """Method to execute at beginning of tearDown"""
        self.db_session.close()

    def test_rasterize_cell_size_ascii_utm(self):
        """
        Tests rasterize_shapefile using cell size to ascii in utm
        """
        project_name = 'grid_standard_msk'
        self.msk_file.generateFromWatershedShapefile(
            self.shapefile_path,
            cell_size=1000,
            out_raster_path='{0}.msk'.format(project_name),
        )
        self.project_manager.writeInput(session=self.db_session,
                                        directory=self.gssha_project_directory,
                                        name=project_name)
        # compare results
        self._compare_output(project_name)

    def test_rasterize_cell_size_ascii_utm_outlet(self):
        """
        Tests rasterize_shapefile using cell size to ascii in utm
        Then add outlet information
        """
        project_name = 'grid_standard_msk_outlet'
        self.msk_file.generateFromWatershedShapefile(
            self.shapefile_path,
            cell_size=1000,
            out_raster_path='{0}.msk'.format(project_name),
        )
        self.project_manager.setOutlet(col=0, row=9)
        self.project_manager.writeInput(session=self.db_session,
                                        directory=self.gssha_project_directory,
                                        name=project_name)
        # compare results
        self._compare_output(project_name)
* License: BSD 2-Clause ******************************************************************************** """ # Create a GsshaPy PostGIS database from gsshapy.lib import db_tools as dbt sqlalchemy_url = dbt.sqlalchemy_url = dbt.init_postgresql_db(username='******', host='localhost', database='gsshapy_tutorial', port='5432', password='******') # Create SQLAlchemy session object for db interaction session = dbt.create_session(sqlalchemy_url) # Read Files to a Database --------------------------------------------------------------------------------------------# # Instantiate ProjectFile file object from gsshapy.orm import ProjectFile projectFile = ProjectFile() # Read file into database readDirectory = '/path_to/tutorial-data' filename = 'parkcity.prj' projectFile.read(directory=readDirectory, filename=filename, session=session) # Inspect supporting objects projectCards = projectFile.projectCards for card in projectCards: print card for card in projectCards: print card.name, card.value
class TestElevation(TestGridTemplate):
    """Tests generating a GSSHA elevation grid from an elevation raster."""

    def setUp(self):
        """Stage the GSSHA project, elevation raster, and shapefile, then
        load the project into an in-memory database."""
        self.gssha_project_directory = path.join(self.writeDirectory, 'gssha_project')
        self.gssha_project_file = 'grid_standard_basic.prj'
        self.elevation_path = path.join(self.writeDirectory, 'gmted_elevation.tif')
        self.shapefile_path = path.join(self.writeDirectory, 'phillipines_5070115700.shp')
        self.compare_path = path.join(self.readDirectory, 'phillipines', 'compare_data')

        # stage the baseline GSSHA project (best effort: may already exist)
        try:
            copytree(path.join(self.readDirectory, 'phillipines', 'gssha_project'),
                     self.gssha_project_directory)
        except OSError:
            pass

        # stage the elevation raster
        try:
            copy(path.join(self.readDirectory, 'phillipines', 'gmted_elevation.tif'),
                 self.elevation_path)
        except OSError:
            pass

        # stage every component of the shapefile (.shp/.shx/.dbf/...)
        pattern = path.join(self.readDirectory, 'phillipines',
                            'phillipines_5070115700.*')
        for part in glob(pattern):
            try:
                copy(part, path.join(self.writeDirectory, path.basename(part)))
            except OSError:
                pass

        # in-memory SQLite database + session for this test
        sqlalchemy_url, sql_engine = dbt.init_sqlite_memory()
        self.db_session = dbt.create_session(sqlalchemy_url, sql_engine)

        # read the GSSHA project into the database
        self.project_manager = ProjectFile(name='grid_standard_ele')
        self.project_manager.readInput(directory=self.gssha_project_directory,
                                       projectFileName=self.gssha_project_file,
                                       session=self.db_session)

        self.ele_file = ElevationGridFile(project_file=self.project_manager,
                                          session=self.db_session)
        chdir(self.gssha_project_directory)

    def test_generate_elevation_grid(self):
        """
        Tests generating an elevation grid from raster
        """
        self.ele_file.generateFromRaster(self.elevation_path,
                                         self.shapefile_path)

        # WRITE OUT UPDATED GSSHA PROJECT FILE
        self.project_manager.writeInput(session=self.db_session,
                                        directory=self.gssha_project_directory,
                                        name='grid_standard_ele')

        # compare the generated elevation grid against the baseline
        generated_ele = path.join(self.gssha_project_directory, 'grid_standard_ele.ele')
        baseline_ele = path.join(self.compare_path, 'grid_standard_ele.ele')
        self._compare_files(baseline_ele, generated_ele, raster=True)

        # compare the project files
        generated_prj = path.join(self.gssha_project_directory, 'grid_standard_ele.prj')
        baseline_prj = path.join(self.compare_path, 'grid_standard_ele.prj')
        self._compare_files(generated_prj, baseline_prj)
def test_generate_basic_project_land_cover(self):
    """
    Tests generating a basic GSSHA project with land cover
    """
    chdir(self.gssha_project_directory)
    project_name = "grid_standard_basic_land_cover"

    # persist the project manager first so generated files can attach to it
    project_manager = ProjectFile(name=project_name, map_type=1)
    self.db_session.add(project_manager)
    self.db_session.commit()

    # ADD MASK
    msk_file = WatershedMaskFile(project_file=project_manager,
                                 session=self.db_session)
    msk_file.generateFromWatershedShapefile(
        self.shapefile_path,
        cell_size=1000,
        out_raster_path='{0}.msk'.format(project_name),
    )

    # ADD ELEVATION FILE
    ele_file = ElevationGridFile(project_file=project_manager,
                                 session=self.db_session)
    ele_file.generateFromRaster(self.elevation_path, self.shapefile_path)

    # ADD ROUGHNESS FROM LAND COVER
    # see http://www.gsshawiki.com/Project_File:Overland_Flow_%E2%80%93_Required
    mapTableFile = MapTableFile(project_file=project_manager)
    mapTableFile.addRoughnessMapFromLandUse("roughness",
                                            self.db_session,
                                            self.land_use_grid,
                                            land_use_grid_id='glcf')

    # ADD ADDITIONAL REQUIRED CARDS
    # see http://www.gsshawiki.com/Project_File:Required_Inputs
    # see http://www.gsshawiki.com/Project_File:Output_Files_%E2%80%93_Required
    # see http://www.gsshawiki.com/Project_File:Rainfall_Input_and_Options_%E2%80%93_Required
    cards = (
        ('TOT_TIME', '180', False),
        ('TIMESTEP', '10', False),
        ('HYD_FREQ', '15', False),
        ('SUMMARY', '{0}.sum'.format(project_name), True),
        ('OUTLET_HYDRO', '{0}.otl'.format(project_name), True),
        ('PRECIP_UNIF', '', False),
        ('RAIN_INTENSITY', '2.4', False),
        ('RAIN_DURATION', '30', False),
        ('START_DATE', '2017 02 28', False),
        ('START_TIME', '14 33', False),
    )
    for card_name, card_value, quoted in cards:
        if quoted:
            project_manager.setCard(card_name, card_value, add_quotes=True)
        else:
            project_manager.setCard(card_name, card_value)

    # write data
    project_manager.writeInput(session=self.db_session,
                               directory=self.gssha_project_directory,
                               name=project_name)

    # compare main project files
    self._compare_basic_model_idx_maps(project_name)
def test_generate_basic_project(self):
    """
    Tests generating a basic GSSHA project
    """
    chdir(self.gssha_project_directory)
    project_name = "grid_standard_basic"

    # persist the project manager first so generated files can attach to it
    project_manager = ProjectFile(name=project_name, map_type=1)
    self.db_session.add(project_manager)
    self.db_session.commit()

    # ADD MASK
    msk_file = WatershedMaskFile(project_file=project_manager,
                                 session=self.db_session)
    msk_file.generateFromWatershedShapefile(
        self.shapefile_path,
        cell_size=1000,
        out_raster_path='{0}.msk'.format(project_name),
    )

    # ADD ELEVATION FILE
    ele_file = ElevationGridFile(project_file=project_manager,
                                 session=self.db_session)
    ele_file.generateFromRaster(self.elevation_path, self.shapefile_path)

    # ADD OUTLET POINT
    project_manager.setOutlet(col=0, row=6, outslope=0.002)

    # ADD ADDITIONAL REQUIRED CARDS
    # see http://www.gsshawiki.com/Project_File:Required_Inputs
    # see http://www.gsshawiki.com/Project_File:Output_Files_%E2%80%93_Required
    # see http://www.gsshawiki.com/Project_File:Overland_Flow_%E2%80%93_Required
    # see http://www.gsshawiki.com/Project_File:Rainfall_Input_and_Options_%E2%80%93_Required
    cards = (
        ('TOT_TIME', '180', False),
        ('TIMESTEP', '10', False),
        ('HYD_FREQ', '15', False),
        ('SUMMARY', '{0}.sum'.format(project_name), True),
        ('OUTLET_HYDRO', '{0}.otl'.format(project_name), True),
        ('MANNING_N', '0.013', False),
        ('PRECIP_UNIF', '', False),
        ('RAIN_INTENSITY', '2.4', False),
        ('RAIN_DURATION', '30', False),
        ('START_DATE', '2017 02 28', False),
        ('START_TIME', '14 33', False),
    )
    for card_name, card_value, quoted in cards:
        if quoted:
            project_manager.setCard(card_name, card_value, add_quotes=True)
        else:
            project_manager.setCard(card_name, card_value)

    # write data
    project_manager.writeInput(session=self.db_session,
                               directory=self.gssha_project_directory,
                               name=project_name)

    # compare
    self._compare_basic_model(project_name)
from gsshapy.lib import db_tools as dbt

# Create a GsshaPy PostGIS database
sqlalchemy_url = dbt.sqlalchemy_url = dbt.init_postgresql_db(
    username='******',
    host='localhost',
    database='gsshapy_tutorial',
    port='5432',
    password='******')

# Create SQLAlchemy session object for db interaction
session = dbt.create_session(sqlalchemy_url)

# Read Files to a Database --------------------------------------------------------------------------------------------#

# Instantiate ProjectFile file object
from gsshapy.orm import ProjectFile
projectFile = ProjectFile()

# Read file into database
readDirectory = '/path_to/tutorial-data'
filename = 'parkcity.prj'
projectFile.read(directory=readDirectory, filename=filename, session=session)

# Inspect supporting objects.
# NOTE: the Python 2 `print card` statements were converted to print()
# calls so the tutorial also runs under Python 3.
projectCards = projectFile.projectCards

for card in projectCards:
    print(card)

for card in projectCards:
    print(card.name, card.value)