Example 1
def test_array_grid(prep):
    """
    Test array grid
    """
    input_raster, compare_path = prep
    ggrid = GDALGrid(input_raster)

    arrg = ArrayGrid(in_array=ggrid.np_array(masked=True),
                     wkt_projection=ggrid.wkt,
                     geotransform=ggrid.geotransform)

    assert_almost_equal(ggrid.geotransform, arrg.geotransform)
    assert ggrid.x_size == arrg.x_size
    assert ggrid.y_size == arrg.y_size
    assert ggrid.proj4 == arrg.proj4
    assert (ggrid.np_array() == arrg.np_array()).all()
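
These tests unpack a prep fixture into an input raster path and a directory of expected outputs, and later examples also use a tgrid fixture exposing a write directory. A minimal sketch of such fixtures, with hypothetical directory and file names, might look like this:

import pytest
from os import path
from collections import namedtuple

TGrid = namedtuple('TGrid', 'write')


@pytest.fixture
def prep():
    # Hypothetical layout: the tests only need an input raster
    # and a folder of expected outputs to compare against.
    base_dir = path.join(path.dirname(__file__), 'grid_files')
    input_raster = path.join(base_dir, 'input', 'elevation.tif')
    compare_path = path.join(base_dir, 'compare')
    return input_raster, compare_path


@pytest.fixture
def tgrid(tmpdir):
    # Expose a .write directory for generated output files.
    return TGrid(write=str(tmpdir))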
Example 2
def test_array_grid3d(prep):
    """
    Test array grid 3d version
    """
    input_raster, compare_path = prep
    ggrid = GDALGrid(input_raster)
    orig_array = ggrid.np_array(masked=True)
    grid_array = np.array([orig_array, 5 * orig_array, 4 * orig_array])
    arrg = ArrayGrid(in_array=grid_array,
                     wkt_projection=ggrid.wkt,
                     geotransform=ggrid.geotransform)

    assert_almost_equal(ggrid.geotransform, arrg.geotransform)
    assert ggrid.x_size == arrg.x_size
    assert ggrid.y_size == arrg.y_size
    assert ggrid.proj4 == arrg.proj4
    assert arrg.num_bands == 3
    assert (arrg.np_array(band='all') == grid_array).all()
Example 3
def test_array_grid_nodata(prep):
    """
    Test array grid with nodata
    """
    input_raster, compare_path = prep
    ggrid = GDALGrid(input_raster)
    gnodata = ggrid.dataset.GetRasterBand(1).GetNoDataValue()
    arrg = ArrayGrid(in_array=ggrid.np_array(),
                     wkt_projection=ggrid.wkt,
                     geotransform=ggrid.geotransform,
                     nodata_value=gnodata)

    anodata = arrg.dataset.GetRasterBand(1).GetNoDataValue()
    assert gnodata == anodata
    assert_almost_equal(ggrid.geotransform, arrg.geotransform)
    assert ggrid.x_size == arrg.x_size
    assert ggrid.y_size == arrg.y_size
    assert ggrid.proj4 == arrg.proj4
    assert (ggrid.np_array() == arrg.np_array()).all()
Example 4
def test_array_grid3d_nodata(prep):
    """
    Test array grid 3d version with nodata
    """
    input_raster, compare_path = prep
    ggrid = GDALGrid(input_raster)
    gnodata = ggrid.dataset.GetRasterBand(1).GetNoDataValue()
    orig_array = ggrid.np_array(masked=True)
    grid_array = np.array([orig_array, 5 * orig_array, 4 * orig_array])
    arrg = ArrayGrid(in_array=grid_array,
                     wkt_projection=ggrid.wkt,
                     geotransform=ggrid.geotransform,
                     nodata_value=gnodata)

    assert_almost_equal(ggrid.geotransform, arrg.geotransform)
    assert ggrid.x_size == arrg.x_size
    assert ggrid.y_size == arrg.y_size
    assert ggrid.proj4 == arrg.proj4
    assert arrg.num_bands == 3
    assert (arrg.np_array(band='all') == grid_array).all()

    for band_id in range(1, arrg.num_bands + 1):
        anodata = arrg.dataset.GetRasterBand(band_id).GetNoDataValue()
        assert gnodata == anodata
Example 5
def test_gdal_grid_projection(prep, tgrid):
    """
    Tests GDALGrid when an external projection file is supplied
    """
    input_raster, compare_path = prep
    compare_projection_file = path.join(compare_path, 'test_projection.prj')
    ggrid = GDALGrid(input_raster, compare_projection_file)

    # check properties
    assert_almost_equal(ggrid.geotransform,
                        (120.99986111111112, 0.008333333333333333, 0.0,
                         16.008194444444445, 0.0, -0.008333333333333333))
    assert ggrid.x_size == 120
    assert ggrid.y_size == 120
    assert ggrid.num_bands == 1
    assert ggrid.wkt == ('GEOGCS["WGS 84",DATUM["WGS_1984",'
                         'SPHEROID["WGS 84",6378137,298.257223563,'
                         'AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG",'
                         '"6326"]],PRIMEM["Greenwich",0],UNIT["degree",'
                         '0.0174532925199433],AUTHORITY["EPSG","4326"]]')
    assert ggrid.proj4 == '+proj=longlat +datum=WGS84 +no_defs '
Example 6
def test_to_polygon(prep, tgrid):
    """Tests converting a raster to a polygon."""
    input_raster, compare_path = prep
    ggrid = GDALGrid(input_raster)

    # check write functions
    shapefile_name = 'test_polygon.shp'
    out_shapefile = path.join(tgrid.write, shapefile_name)
    ggrid.to_polygon(out_shapefile)
    compare_shapefile = path.join(compare_path, shapefile_name)
    compare_files(compare_shapefile, out_shapefile, shapefile=True)

    # check write with mask functions
    shapefile_name = 'test_polygon_mask.shp'
    out_shapefile = path.join(tgrid.write, shapefile_name)
    ggrid.to_polygon(out_shapefile, self_mask=True)
    compare_shapefile = path.join(compare_path, shapefile_name)
    compare_files(compare_shapefile, out_shapefile, shapefile=True)
Example 7
def x_section_from_latlon(elevation_file,
                          x_section_lat0,
                          x_section_lon0,
                          x_section_lat1,
                          x_section_lon1,
                          as_polygon=False,
                          auto_clean=False):

    """
    This workflow extracts a cross section from a DEM
    based on the input latitude and longitude point pairs.

    Parameters:
    -----------
    elevation_file: str
        Path to the elevation DEM.
    x_section_lat0: float
        The latitude of the first coordinate.
    x_section_lon0: float
        The longitude of the first coordinate.
    x_section_lat1: float
        The latitude of the second coordinate.
    x_section_lon1: float
        The longitude of the second coordinate.
    as_polygon: bool, optional
        If True, will return cross section as a
        :obj:`shapely.geometry.Polygon`. Default is False.
    auto_clean: bool, optional
        If True, will attempt to clean any issues from the polygon.
        Default is False.

    Returns:
    --------
    list or :obj:`shapely.geometry.Polygon`
        Cross section information.
        The list will be xy coordinate pairs.


    Example::

        from shapely.geometry import Polygon
        from xman.xsect import x_section_from_latlon


        elevation_file = '/path/to/elevation.tif'
        lat1 = 34.105265417341442
        lon1 = 38.993958690587505
        lat2 = 34.107264451129197
        lon2 = 38.99355588515526
        x_sect_list = x_section_from_latlon(elevation_file,
                                            lat1,
                                            lon1,
                                            lat2,
                                            lon2)

    """
    utm_proj = utm_proj_from_latlon(x_section_lat0, x_section_lon0,
                                    as_osr=True)
    sp_ref = osr.SpatialReference()
    sp_ref.ImportFromEPSG(4326)
    geo_to_utm_trans = osr.CoordinateTransformation(sp_ref, utm_proj)

    x_line_m = LineString((
        geo_to_utm_trans.TransformPoint(x_section_lon0, x_section_lat0)[:2],
        geo_to_utm_trans.TransformPoint(x_section_lon1, x_section_lat1)[:2]
    ))

    elevation_utm_ggrid = GDALGrid(elevation_file).to_projection(utm_proj)

    x_sect_list = []

    for x_step in np.linspace(0, x_line_m.length, num=20):
        x_point = x_line_m.interpolate(x_step)
        x_sect_list.append((
            x_step, elevation_utm_ggrid.get_val_coord(x_point.x, x_point.y)
        ))

    if as_polygon or auto_clean:
        x_sect_poly = Polygon(x_sect_list)
        if not x_sect_poly.is_valid and auto_clean:
            x_sect_poly = x_sect_poly.buffer(0)
            print("WARNING: Cross section cleaned up.")
            if hasattr(x_sect_poly, 'geoms'):
                if len(x_sect_poly.geoms) > 1:
                    largest_poly = x_sect_poly.geoms[0]
                    for geom_poly in x_sect_poly.geoms[1:]:
                        if geom_poly.area > largest_poly.area:
                            largest_poly = geom_poly
                    x_sect_poly = largest_poly

        if as_polygon:
            return x_sect_poly

        x_coords, y_coords = x_sect_poly.exterior.coords.xy
        return list(zip(x_coords, y_coords))

    return x_sect_list
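
A hedged usage sketch of the workflow above, requesting a cleaned cross-section polygon. The module path follows the docstring's own example; the elevation file path and coordinates are placeholders:

from xman.xsect import x_section_from_latlon

x_sect_poly = x_section_from_latlon('/path/to/elevation.tif',
                                    34.105265, 38.993958,
                                    34.107264, 38.993555,
                                    as_polygon=True,
                                    auto_clean=True)
# The returned shapely Polygon can be inspected directly.
print(x_sect_poly.area)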
Example 8
    def __init__(
        self,
        project_directory,
        project_name=None,
        mask_shapefile=None,
        grid_cell_size=None,
        elevation_grid_path=None,
        simulation_timestep=30,
        out_hydrograph_write_frequency=10,
        roughness=None,
        land_use_grid=None,
        land_use_grid_id=None,
        land_use_to_roughness_table=None,
        db_session=None,
        project_manager=None,
    ):

        chdir(project_directory)
        self.project_directory = project_directory
        self.db_session = db_session
        self.project_manager = project_manager

        if project_manager is not None and db_session is None:
            raise ValueError(
                "'db_session' is required to edit existing model if 'project_manager' is given."
            )

        if project_manager is None and db_session is None:
            # Create Test DB
            sqlalchemy_url, sql_engine = dbt.init_sqlite_memory()

            # Create DB Sessions
            self.db_session = dbt.create_session(sqlalchemy_url, sql_engine)

            if project_name is not None and mask_shapefile is None and elevation_grid_path is None:
                # Instantiate GSSHAPY object for reading to database
                self.project_manager = ProjectFile()
                # Call read method
                self.project_manager.readInput(
                    directory=self.project_directory,
                    projectFileName="{0}.prj".format(project_name),
                    session=self.db_session)
            else:
                # generate model
                if None in (project_name, mask_shapefile, elevation_grid_path):
                    raise ValueError(
                        "Need to set project_name, mask_shapefile, "
                        "and elevation_grid_path to generate "
                        "a new GSSHA model.")
                # Instantiate GSSHAPY object for reading to database
                self.project_manager = ProjectFile(name=project_name,
                                                   map_type=0)
                self.db_session.add(self.project_manager)
                self.db_session.commit()

                # ADD BASIC REQUIRED CARDS
                # see http://www.gsshawiki.com/Project_File:Required_Inputs
                self.project_manager.setCard('TIMESTEP',
                                             str(simulation_timestep))
                self.project_manager.setCard(
                    'HYD_FREQ', str(out_hydrograph_write_frequency))
                # see http://www.gsshawiki.com/Project_File:Output_Files_%E2%80%93_Required
                self.project_manager.setCard('SUMMARY',
                                             '{0}.sum'.format(project_name),
                                             add_quotes=True)
                self.project_manager.setCard('OUTLET_HYDRO',
                                             '{0}.otl'.format(project_name),
                                             add_quotes=True)

                # ADD REQUIRED MODEL GRID INPUT
                if grid_cell_size is None:
                    # calculate cell size from elevation grid if not given
                    # as input from the user
                    ele_grid = GDALGrid(elevation_grid_path)
                    utm_bounds = ele_grid.bounds(as_utm=True)
                    x_cell_size = (utm_bounds[1] -
                                   utm_bounds[0]) / ele_grid.x_size
                    y_cell_size = (utm_bounds[3] -
                                   utm_bounds[2]) / ele_grid.y_size
                    grid_cell_size = min(x_cell_size, y_cell_size)
                    ele_grid = None
                    log.info("Calculated cell size is {grid_cell_size}".format(
                        grid_cell_size=grid_cell_size))

                self.set_mask_from_shapefile(mask_shapefile, grid_cell_size)
                self.set_elevation(elevation_grid_path, mask_shapefile)
                self.set_roughness(
                    roughness=roughness,
                    land_use_grid=land_use_grid,
                    land_use_grid_id=land_use_grid_id,
                    land_use_to_roughness_table=land_use_to_roughness_table,
                )
Example 9
def test_gdal_grid(prep, tgrid):
    """
    Tests GDALGrid properties, coordinate conversions, and write functions
    """
    input_raster, compare_path = prep
    ggrid = GDALGrid(input_raster)

    # check properties
    assert_almost_equal(ggrid.geotransform,
                        (120.99986111111112, 0.008333333333333333, 0.0,
                         16.008194444444445, 0.0, -0.008333333333333333))
    assert ggrid.x_size == 120
    assert ggrid.y_size == 120
    assert ggrid.num_bands == 1
    assert ggrid.wkt == ('GEOGCS["WGS 84",DATUM["WGS_1984",'
                         'SPHEROID["WGS 84",6378137,298.257223563,'
                         'AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG",'
                         '"6326"]],PRIMEM["Greenwich",0],UNIT["degree",'
                         '0.0174532925199433],AUTHORITY["EPSG","4326"]]')
    assert ggrid.proj4 == '+proj=longlat +datum=WGS84 +no_defs '
    assert isinstance(ggrid.proj, Proj)
    assert ggrid.epsg == '4326'
    sp_ref = osr.SpatialReference()
    sp_ref.ImportFromEPSG(32651)

    latitude, longitude = ggrid.latlon
    assert latitude.shape == (120, 120)
    assert longitude.shape == (120, 120)
    assert_almost_equal(
        latitude[20:22, 20:22],
        [[15.83736111, 15.83736111], [15.82902778, 15.82902778]])
    assert_almost_equal(
        longitude[20:22, 20:22],
        [[121.17069444, 121.17902778], [121.17069444, 121.17902778]])
    y_coords = ggrid.y_coords
    assert y_coords.shape == (120, )
    assert_almost_equal(y_coords[20:22], [15.83736111, 15.82902778])
    x_coords = ggrid.x_coords
    assert x_coords.shape == (120, )
    assert_almost_equal(x_coords[20:22], [121.17069444, 121.17902778])
    # check functions
    assert_almost_equal(ggrid.bounds(),
                        (120.99986111111112, 121.99986111111112,
                         15.008194444444445, 16.008194444444445))
    assert_almost_equal(ggrid.bounds(as_geographic=True),
                        (120.99986111111112, 121.99986111111112,
                         15.008194444444445, 16.008194444444445))
    assert_almost_equal(ggrid.bounds(as_utm=True),
                        (284940.2424665766, 393009.70510977274,
                         1659170.2715823832, 1770872.3212051827))
    assert_almost_equal(ggrid.bounds(as_projection=sp_ref),
                        (284940.2424665766, 393009.70510977274,
                         1659170.2715823832, 1770872.3212051827))
    x_loc, y_loc = ggrid.pixel2coord(5, 10)
    assert_almost_equal((x_loc, y_loc),
                        (121.04569444444445, 15.920694444444445))

    assert ggrid.coord2pixel(x_loc, y_loc) == (5, 10)
    lon, lat = ggrid.pixel2lonlat(5, 10)
    assert_almost_equal((lon, lat), (121.04569444444445, 15.920694444444445))
    assert ggrid.lonlat2pixel(lon, lat) == (5, 10)

    with pytest.raises(IndexError):
        x_loc, y_loc = ggrid.pixel2coord(500000, 10)

    with pytest.raises(IndexError):
        x_loc, y_loc = ggrid.pixel2coord(5, 10000000)

    with pytest.raises(IndexError):
        x_coord, y_coord = ggrid.coord2pixel(1870872, 1669170)

    with pytest.raises(IndexError):
        x_coord, y_coord = ggrid.coord2pixel(284940, 10000000)

    val_default = ggrid.get_val(5, 10)
    assert val_default == 337
    val_latlon = ggrid.get_val_latlon(121.04569444444445, 15.920694444444445)
    assert val_latlon == 337
    val_coord = ggrid.get_val_coord(121.04569444444445, 15.920694444444445)
    assert val_coord == 337

    # check write functions
    projection_name = 'test_projection.prj'
    out_projection_file = path.join(tgrid.write, projection_name)
    ggrid.write_prj(out_projection_file)
    compare_projection_file = path.join(compare_path, projection_name)
    compare_files(compare_projection_file, out_projection_file)

    tif_name = 'test_tif.tif'
    out_tif_file = path.join(tgrid.write, tif_name)
    ggrid.to_tif(out_tif_file)
    compare_tif_file = path.join(compare_path, tif_name)
    compare_files(out_tif_file, compare_tif_file, raster=True)

    tif_prj_name = 'test_tif_32651.tif'
    out_tif_file = path.join(tgrid.write, tif_prj_name)
    proj_grid = ggrid.to_projection(sp_ref)
    proj_grid.to_tif(out_tif_file)
    compare_tif_file = path.join(compare_path, tif_prj_name)
    compare_files(out_tif_file, compare_tif_file, raster=True)

    grass_name = 'test_grass_ascii.asc'
    out_grass_file = path.join(tgrid.write, grass_name)
    ggrid.to_grass_ascii(out_grass_file)
    compare_grass_file = path.join(compare_path, grass_name)
    compare_files(out_grass_file, compare_grass_file, raster=True)

    arc_name = 'test_arc_ascii.asc'
    out_arc_file = path.join(tgrid.write, arc_name)
    ggrid.to_arc_ascii(out_arc_file)
    compare_arc_file = path.join(compare_path, arc_name)
    compare_files(out_arc_file, compare_arc_file, raster=True)
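
The test above exercises most of the GDALGrid API. A short sketch using only the calls shown there, outside of pytest, could look like the following; the import path and file paths are assumptions:

from gazar.grid import GDALGrid  # module path is an assumption

ggrid = GDALGrid('/path/to/elevation.tif')
# Basic properties exercised by the test above.
print(ggrid.proj4, ggrid.x_size, ggrid.y_size, ggrid.num_bands)
print(ggrid.bounds(as_geographic=True))
# Query a cell value by longitude/latitude, then export to Arc ASCII.
print(ggrid.get_val_latlon(121.0457, 15.9207))
ggrid.to_arc_ascii('/path/to/output_arc.asc')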
Example 10
    def __init__(self,
                 project_directory,
                 project_name=None,
                 mask_shapefile=None,
                 auto_clean_mask_shapefile=False,
                 grid_cell_size=None,
                 elevation_grid_path=None,
                 simulation_timestep=30,
                 out_hydrograph_write_frequency=10,
                 roughness=None,
                 land_use_grid=None,
                 land_use_grid_id=None,
                 land_use_to_roughness_table=None,
                 load_rasters_to_db=True,
                 db_session=None,
                 project_manager=None,
                ):

        self.project_directory = project_directory
        self.db_session = db_session
        self.project_manager = project_manager
        self.load_rasters_to_db = load_rasters_to_db

        if project_manager is not None and db_session is None:
            raise ValueError("'db_session' is required to edit existing model if 'project_manager' is given.")

        if project_manager is None and db_session is None:

            if project_name is not None and mask_shapefile is None and elevation_grid_path is None:
                self.project_manager, db_sessionmaker = \
                    dbt.get_project_session(project_name, self.project_directory)
                self.db_session = db_sessionmaker()
                self.project_manager.readInput(directory=self.project_directory,
                                               projectFileName="{0}.prj".format(project_name),
                                               session=self.db_session)
            else:
                # generate model
                if None in (project_name, mask_shapefile, elevation_grid_path):
                    raise ValueError("Need to set project_name, mask_shapefile, "
                                     "and elevation_grid_path to generate "
                                     "a new GSSHA model.")

                self.project_manager, db_sessionmaker = \
                    dbt.get_project_session(project_name, self.project_directory, map_type=0)
                self.db_session = db_sessionmaker()
                self.db_session.add(self.project_manager)
                self.db_session.commit()

                # ADD BASIC REQUIRED CARDS
                # see http://www.gsshawiki.com/Project_File:Required_Inputs
                self.project_manager.setCard('TIMESTEP',
                                             str(simulation_timestep))
                self.project_manager.setCard('HYD_FREQ',
                                             str(out_hydrograph_write_frequency))
                # see http://www.gsshawiki.com/Project_File:Output_Files_%E2%80%93_Required
                self.project_manager.setCard('SUMMARY',
                                             '{0}.sum'.format(project_name),
                                             add_quotes=True)
                self.project_manager.setCard('OUTLET_HYDRO',
                                             '{0}.otl'.format(project_name),
                                             add_quotes=True)

                # ADD REQUIRED MODEL GRID INPUT
                if grid_cell_size is None:
                    # calculate cell size from elevation grid if not given
                    # as input from the user
                    ele_grid = GDALGrid(elevation_grid_path)
                    utm_bounds = ele_grid.bounds(as_utm=True)
                    x_cell_size = (utm_bounds[1] - utm_bounds[0])/ele_grid.x_size
                    y_cell_size = (utm_bounds[3] - utm_bounds[2])/ele_grid.y_size
                    grid_cell_size = min(x_cell_size, y_cell_size)
                    ele_grid = None
                    log.info("Calculated cell size is {grid_cell_size}"
                             .format(grid_cell_size=grid_cell_size))

                if auto_clean_mask_shapefile:
                    mask_shapefile = self.clean_boundary_shapefile(mask_shapefile)

                self.set_mask_from_shapefile(mask_shapefile, grid_cell_size)
                self.set_elevation(elevation_grid_path, mask_shapefile)
                self.set_roughness(roughness=roughness,
                                   land_use_grid=land_use_grid,
                                   land_use_grid_id=land_use_grid_id,
                                   land_use_to_roughness_table=land_use_to_roughness_table,
                                   )
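
A minimal sketch of generating a new model with the constructor above. The class and module names (GSSHAModel in gsshapy.modeling) are assumptions, as are the paths and roughness value:

from gsshapy.modeling import GSSHAModel  # class/module names are assumptions

model = GSSHAModel(project_directory='/path/to/project_dir',
                   project_name='my_model',
                   mask_shapefile='/path/to/boundary.shp',
                   elevation_grid_path='/path/to/elevation.tif',
                   roughness=0.035)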
Example 11
    def rasterToPolygon(raster_file, polygon_file):
        """
        Converts watershed raster to polygon and then dissolves it.
        It dissolves features based on the LINKNO attribute.
        """
        log("Process: Raster to Polygon ...")
        time_start = datetime.utcnow()
        temp_polygon_file = \
            "{0}_temp.shp".format(
                os.path.splitext(os.path.basename(polygon_file))[0])

        GDALGrid(raster_file).to_polygon(out_shapefile=temp_polygon_file,
                                         fieldname="LINKNO",
                                         self_mask=True)

        log("Time to convert to polygon: {0}".format(datetime.utcnow() -
                                                     time_start))

        log("Dissolving ...")
        time_start_dissolve = datetime.utcnow()
        ogr_polygon_shapefile = ogr.Open(temp_polygon_file)
        ogr_polygon_shapefile_lyr = ogr_polygon_shapefile.GetLayer()
        number_of_features = ogr_polygon_shapefile_lyr.GetFeatureCount()
        polygon_rivid_list = np.zeros(number_of_features, dtype=np.int32)
        for feature_idx, catchment_feature in \
                enumerate(ogr_polygon_shapefile_lyr):
            polygon_rivid_list[feature_idx] = \
                catchment_feature.GetField('LINKNO')

        shp_drv = ogr.GetDriverByName('ESRI Shapefile')
        # Remove output shapefile if it already exists
        if os.path.exists(polygon_file):
            shp_drv.DeleteDataSource(polygon_file)

        dissolve_shapefile = shp_drv.CreateDataSource(polygon_file)
        dissolve_layer = \
            dissolve_shapefile.CreateLayer(
                '',
                ogr_polygon_shapefile_lyr.GetSpatialRef(),
                ogr.wkbPolygon)
        dissolve_layer.CreateField(ogr.FieldDefn('LINKNO', ogr.OFTInteger))
        dissolve_layer_defn = dissolve_layer.GetLayerDefn()

        for unique_rivid in np.unique(polygon_rivid_list):
            # get indices where it is in the polygon
            feature_indices = np.where(polygon_rivid_list == unique_rivid)[0]
            new_feat = ogr.Feature(dissolve_layer_defn)
            new_feat.SetField('LINKNO', int(unique_rivid))

            if len(feature_indices) == 1:
                # write feature to file
                feature = \
                    ogr_polygon_shapefile_lyr.GetFeature(feature_indices[0])
                new_feat.SetGeometry(feature.GetGeometryRef())
            else:
                # dissolve
                dissolve_poly_list = []
                for feature_index in feature_indices:
                    feature = \
                        ogr_polygon_shapefile_lyr.GetFeature(feature_index)
                    feat_geom = feature.GetGeometryRef()
                    dissolve_poly_list.append(
                        shapely_loads(feat_geom.ExportToWkb()))
                dissolve_polygon = cascaded_union(dissolve_poly_list)
                new_feat.SetGeometry(
                    ogr.CreateGeometryFromWkb(dissolve_polygon.wkb))
            dissolve_layer.CreateFeature(new_feat)
        # clean up
        shp_drv.DeleteDataSource(temp_polygon_file)
        log("Time to dissolve: {0}".format(datetime.utcnow() -
                                           time_start_dissolve))
        log("Total time to convert: {0}".format(datetime.utcnow() -
                                                time_start))
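
rasterToPolygon is defined at method indentation but takes no self, so it is presumably used as a staticmethod. A hypothetical call, with placeholder paths, would be:

# Convert a watershed raster to a dissolved catchment shapefile
# (both paths are placeholders for illustration).
rasterToPolygon('/path/to/watershed.tif', '/path/to/catchments.shp')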