def Testing(GCPs, img, outpImg):
    from skimage.transform import AffineTransform, warp
    from skimage.measure import ransac
    from affine import Affine

    img_info = rt.GetRasterInfo(inputRaster=img)
    img_array = np.array(img_info["raster"].GetRasterBand(1).ReadAsArray())
    map = GCPs[:, 0:2]
    pixel = GCPs[:, 2:4]

    model = AffineTransform()
    model.estimate(src=pixel, dst=map)
    print(model.params)

    model_robust, inliers = ransac((pixel, map), AffineTransform,
                                   min_samples=3, residual_threshold=1,
                                   max_trials=1000)
    print(model_robust.params)

    geoTransform = Affine.from_gdal(*img_info["raster"].GetGeoTransform())
    # print(geoTransform)
    geoTransform = Affine.from_gdal(model_robust.params[0, 2],
                                    model_robust.params[0, 0],
                                    model_robust.params[0, 1],
                                    model_robust.params[1, 2],
                                    model_robust.params[1, 0],
                                    model_robust.params[1, 1])
    print(geoTransform)

    point_A = []
    point_B = []
    for i, val in enumerate(GCPs):
        point_A.append(tuple([val[0], val[1]]))
        point_B.append(tuple([val[2], val[3]]))
    point_A = tuple(point_A)
    point_B = tuple(point_B)

    trn = Affine_Fit(from_pts=point_B, to_pts=point_A)
    res, tr = trn.To_Str()
    geoTransform_ = Affine.from_gdal(tr[2][3], tr[0][3], tr[1][3],
                                     tr[2][4], tr[0][4], tr[1][4])
    print(geoTransform_)
    # geoTransform = Affine.from_gdal((1,2,3,5))

    nrows, ncols = np.shape(img_array)
    ras_name = gdal.GetDriverByName("GTiff").Create(outpImg, ncols, nrows, 1,
                                                    gdal.GDT_Float64)
    ras_name.SetGeoTransform(geoTransform_.to_gdal())  # important part ends
    wkt = img_info["raster"].GetProjection()
    ras_name.SetProjection(wkt)
    ras_name.GetRasterBand(1).WriteArray(img_array,
                                         resample_alg=gdal.GRA_Lanczos)
    resampling_method = gdal.GRA_Lanczos
    ras_name = None
def open(path, mode='r', width=None, height=None, count=None, transform=None,
         crs=None, no_data=None, dtype=None, chunks=(256, 256), blocksize=256,
         compression=1, band_names=None):
    # should we pass to different read write classes based on the mode?
    if mode == 'r':
        fid = h5py.File(path, mode)
        ds = KeaH5RDOnly(fid)
    elif mode == 'r+':
        fid = h5py.File(path, mode)
        ds = KeaH5RW(fid)
    elif mode == 'w':
        # Check we have all the necessary creation options
        if (width is None) or (height is None):
            msg = "Error. Both width and height must be specified."
            raise ValueError(msg)

        if dtype is None:
            msg = "Error. The dtype must be specified."
            raise ValueError(msg)

        if count is None:
            msg = "Error. The count must be specified."
            raise ValueError(msg)

        # If we have no transform, default to image co-ordinates
        if (transform is None) or (crs is None):
            ul = (0, 0)
            rot = (0, 0)
            res = (1, -1)
            transform = Affine.from_gdal(*[0.0, 1.0, 0.0, 0.0, 0.0, -1.0])
            crs = ""

        if (chunks[0] > height) or (chunks[1] > width):
            msg = "The chunks must not exceed the width or height."
            raise ValueError(msg)

        # we'll use rasterio's proj4 dict mapping
        if not isinstance(crs, dict):
            msg = "Error. The crs is not a valid proj4 dict style mapping."
            raise ValueError(msg)

        # we'll follow rasterio in using an affine; if we weren't given one,
        # fall back to converting a GDAL-style geotransform
        if not isinstance(transform, Affine):
            transform = Affine.from_gdal(*transform)

        fid = h5py.File(path, mode)
        create_kea_image(fid, width, height, count, transform, crs, no_data,
                         dtype, chunks, blocksize, compression, band_names)
        ds = KeaH5RW(fid)
    return ds
def test_to_raster__preserve_profile__none_nodata(request, tmpdir):
    tmp_raster = tmpdir.join("output_profile.tif")
    input_raster = tmpdir.join("input_profile.tif")

    transform = Affine.from_gdal(0, 512, 0, 0, 0, 512)
    with rasterio.open(
        str(input_raster),
        "w",
        driver="GTiff",
        height=512,
        width=512,
        count=1,
        crs="+init=epsg:4326",
        transform=transform,
        dtype=rasterio.float32,
        tiled=True,
        tilexsize=256,
        tileysize=256,
    ) as rds:
        rds.write(numpy.empty((1, 512, 512), dtype=numpy.float32))

    with request.param(str(input_raster)) as mda:
        mda.rio.to_raster(str(tmp_raster))

    with rasterio.open(str(tmp_raster)) as rds, rasterio.open(
            str(input_raster)) as rdc:
        assert rds.count == rdc.count
        assert rds.crs == rdc.crs
        assert_array_equal(rds.transform, rdc.transform)
        assert_array_equal(rds.nodata, rdc.nodata)
        assert_array_equal(rds.read(), rdc.read())
        assert rds.profile == rdc.profile
        assert rds.nodata is None
def get_geo_transform(raster_src):
    """Get the geotransform for a raster image source.

    Arguments
    ---------
    raster_src : str, :class:`rasterio.DatasetReader`, or `osgeo.gdal.Dataset`
        Path to a raster image with georeferencing data to apply to `geom`.
        Alternatively, an opened :class:`rasterio.Band` object or
        :class:`osgeo.gdal.Dataset` object can be provided. Required if not
        using `affine_obj`.

    Returns
    -------
    transform : :class:`affine.Affine`
        An affine transformation object to the image's location in its CRS.
    """
    if isinstance(raster_src, str):
        affine_obj = rasterio.open(raster_src).transform
    elif isinstance(raster_src, rasterio.DatasetReader):
        affine_obj = raster_src.transform
    elif isinstance(raster_src, gdal.Dataset):
        affine_obj = Affine.from_gdal(*raster_src.GetGeoTransform())

    return affine_obj
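# A hedged usage sketch for get_geo_transform: "scene.tif" is a hypothetical
# path, and the demo assumes the same module-level rasterio/gdal imports as
# the function above. Both access routes should yield the same affine.Affine.
def _demo_get_geo_transform():
    tf_from_path = get_geo_transform("scene.tif")
    tf_from_ds = get_geo_transform(gdal.Open("scene.tif"))
    assert tf_from_path == tf_from_ds
    # map coordinates of the upper-left corner of pixel (0, 0)
    return tf_from_path * (0, 0)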
def write_grid(grid, fname, origin, spacing):
    """
    Write an epipolar resampling grid to file

    :param grid: the grid to write
    :type grid: 3D numpy array
    :param fname: the filename to which the grid will be written
    :type fname: string
    :param origin: origin of the grid
    :type origin: (float, float)
    :param spacing: spacing of the grid
    :type spacing: (float, float)
    """
    geotransform = (origin[0] - 0.5 * spacing[0], spacing[0], 0.0,
                    origin[1] - 0.5 * spacing[1], 0.0, spacing[1])

    transform = Affine.from_gdal(*geotransform)

    with rio.open(fname, 'w', height=grid.shape[0], width=grid.shape[1],
                  count=2, driver='GTiff', dtype=grid.dtype,
                  transform=transform) as dst:
        dst.write_band(1, grid[:, :, 0])
        dst.write_band(2, grid[:, :, 1])
def coord2pixel(x_coord, y_coord, dataset):
    """Returns base-0 raster index using global coordinates to pixel center

    Parameters
    ----------
    x_coord: float
        The projected x coordinate of the cell center.
    y_coord: float
        The projected y coordinate of the cell center.
    dataset: :obj:`gdal.Dataset`
        The open GDAL dataset providing the geotransform and raster size.

    Returns
    -------
    :obj:`tuple`
        (col, row) - The 0-based column and row index of the pixel.
    """
    affine = Affine.from_gdal(*dataset.GetGeoTransform())
    col, row = ~affine * (x_coord, y_coord)
    if col > dataset.RasterXSize or col < 0:
        raise IndexError("Longitude {0} is out of bounds ...".format(x_coord))
    if row > dataset.RasterYSize or row < 0:
        raise IndexError("Latitude {0} is out of bounds ...".format(y_coord))

    return int(col), int(row)
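# A minimal, self-contained sketch of the inverse-affine pattern coord2pixel
# relies on: the forward transform maps (col, row) to map coordinates, so its
# inverse (~) maps map coordinates back to fractional pixel indices. The
# geotransform values here are illustrative.
from affine import Affine

_fwd = Affine.from_gdal(-114.0, 0.2, 0.0, 46.0, 0.0, -0.2)
_col, _row = ~_fwd * (-113.5, 45.5)  # (2.5, 2.5): inside pixel (2, 2)
assert (int(_col), int(_row)) == (2, 2)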
def pointExtract(self, geopnts):
    '''
    Inputs:
    ------------------
    :raster - gdal object
    :geopnts - geopandas vector object
    '''
    self._validateType(self.raster, 'raster')
    self._validateType(geopnts, 'vector')

    geotrans = self.raster.geotransform
    forward_transform = Affine.from_gdal(*geotrans)

    values = {'lats': [], 'lons': [], 'samples': []}
    for i in range(len(geopnts.layer)):
        # print(np.array(geopnts.layer.geometry[i]).squeeze())
        geometry = np.array(geopnts.layer.geometry[i]).squeeze()
        res = ~forward_transform * geometry
        x, y = int(res[0]), int(res[1])
        values['lats'].append(geometry[0])
        values['lons'].append(geometry[1])
        values['samples'].append(self._pointsampling(self.raster, x, y))

    return values
def extract_point_from_raster(point, data_source, band_number=1):
    """Return floating-point value that corresponds to given point."""
    pppoint = PassepartoutPoint(point)

    # Convert point co-ordinates so that they are in same projection as raster
    target_srs_wkt = data_source.GetProjection()
    try:
        pppoint.transform_to(target_srs_wkt)
    except GDALException:
        raise RuntimeError("Couldn't convert point to raster's CRS")
    infinities = (float("inf"), float("-inf"))
    if pppoint.x in infinities or pppoint.y in infinities:
        raise RuntimeError("Couldn't convert point to raster's CRS")

    # Convert geographic co-ordinates to pixel co-ordinates
    forward_transform = Affine.from_gdal(*data_source.GetGeoTransform())
    reverse_transform = ~forward_transform
    px, py = reverse_transform * (pppoint.x, pppoint.y)
    px, py = int(px + 0.5), int(py + 0.5)

    # Extract pixel value
    band = data_source.GetRasterBand(band_number)
    structval = band.ReadRaster(px, py, 1, 1, buf_type=gdal.GDT_Float32)
    result = struct.unpack("f", structval)[0]
    if result == band.GetNoDataValue():
        result = float("nan")
    return result
def test_data_dir_2(tmpdir):
    kwargs = {
        "crs": {'init': 'epsg:4326'},
        "transform": Affine.from_gdal(-114, 0.2, 0, 46, 0, -0.2),
        "count": 4,
        "dtype": rasterio.uint8,
        "driver": "GTiff",
        "width": 10,
        "height": 10
    }

    with rasterio.Env():
        with rasterio.open(str(tmpdir.join('b.tif')), 'w', **kwargs) as dst:
            data = numpy.zeros((4, 10, 10), dtype=rasterio.uint8)
            data[0:3, 0:6, 0:6] = 255
            data[3, 0:6, 0:6] = 255
            dst.write(data)
        with rasterio.open(str(tmpdir.join('a.tif')), 'w', **kwargs) as dst:
            data = numpy.ones((4, 10, 10), dtype=rasterio.uint8)
            data[3, :, :] = 255  # no nodata
            dst.write(data)

    return tmpdir
def get_cropped_profile(profile: dict, slice_x: slice, slice_y: slice):
    """
    slice_x and slice_y are numpy slices
    """
    x_start = slice_x.start or 0
    y_start = slice_y.start or 0
    x_stop = slice_x.stop or profile['width']
    y_stop = slice_y.stop or profile['height']

    width = x_stop - x_start
    height = y_stop - y_start

    profile_cropped = profile.copy()

    trans = profile['transform']
    x_cropped, y_cropped = xy(trans, y_start, x_start, offset='ul')
    trans_list = list(trans.to_gdal())
    trans_list[0] = x_cropped
    trans_list[3] = y_cropped
    transform_cropped = Affine.from_gdal(*trans_list)
    profile_cropped['transform'] = transform_cropped

    profile_cropped['height'] = height
    profile_cropped['width'] = width

    return profile_cropped
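# A hedged usage sketch for get_cropped_profile; the profile dict below is
# illustrative and assumes rasterio-style keys (and that rasterio.transform.xy
# is in scope, as the function requires). Cropping columns 50:250 and rows
# 10:110 yields a 200x100 profile whose origin moves to pixel (50, 10).
from affine import Affine

_profile = {
    'width': 512,
    'height': 512,
    'transform': Affine.from_gdal(-114.0, 0.2, 0.0, 46.0, 0.0, -0.2),
}
_cropped = get_cropped_profile(_profile, slice(50, 250), slice(10, 110))
assert _cropped['width'] == 200 and _cropped['height'] == 100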
def get_mask():
    """
    Gets PSN Arctic region mask
    """
    src_dir = '/disks/sidads_ftp/DATASETS/NOAA/G02135/seaice_analysis'
    src_file = 'Arctic_region_mask_Meier_AnnGlaciol2007.msk'
    nrow, ncol = (448, 304)
    mask = np.fromfile(os.path.join(src_dir, src_file),
                       dtype='byte').reshape(nrow, ncol)
    # msk = np.rot90(msk,2)
    # msk = np.flipud(msk)

    # GDAL style geotransform
    geo_transform = [-3850000.000, 25000., 0., 5850000.000, 0., -25000.]
    transform = Affine.from_gdal(*geo_transform)
    x, _ = (np.arange(ncol) + 0.5, np.zeros(ncol) + 0.5) * transform
    _, y = (np.zeros(nrow) + 0.5, np.arange(nrow) + 0.5) * transform
    # x = [(a * (ic, 0))[0] for ic in np.arange(ncol)]
    # y = [(a * (0, ir))[1] for ir in np.arange(nrow)]

    da = xr.DataArray(mask, coords={'x': x, 'y': y}, dims=['y', 'x'])
    # da = xr.DataArray(msk, dims=['y','x'])

    return da
def point_index_from_grid(gdf, dem_path):
    # load geo raster and get pixel centers
    da = xr.open_rasterio(dem_path)
    transform = Affine.from_gdal(*da.transform)
    nx, ny = da.sizes['x'], da.sizes['y']
    x, y = transform * np.meshgrid(np.arange(nx) + 0.5, np.arange(ny) + 0.5)

    # put point data into projection of gridded data
    new = gdf.to_crs(da.crs[6:])

    # station index
    x_idx = []
    y_idx = []
    for i in range(len(new)):
        minx = abs(new.geometry.x[i] - da.x.values)
        x = np.where(minx == min(abs(new.geometry.x[i] - da.x.values)))[0][0]
        x_idx.append(x)
        # flip y values to align with cartesian coordinates
        miny = abs(new.geometry.y[i] - np.flip(da.y.values))
        y = np.where(miny == min(abs(new.geometry.y[i] -
                                     np.flip(da.y.values))))[0][0]
        y_idx.append(y)

    gdf['x_idx'] = x_idx
    gdf['y_idx'] = y_idx

    return gdf
def write_measurement_h5(
    p: DatasetAssembler,
    name: str,
    g: h5py.Dataset,
    overviews=images.DEFAULT_OVERVIEWS,
    overview_resampling=Resampling.nearest,
    expand_valid_data=True,
    file_id: str = None,
):
    """
    Write a measurement by copying it from a hdf5 dataset.
    """
    if hasattr(g, "chunks"):
        data = g[:]
    else:
        data = g

    p.write_measurement_numpy(
        name=name,
        array=data,
        grid_spec=images.GridSpec(
            shape=g.shape,
            transform=Affine.from_gdal(*g.attrs["geotransform"]),
            crs=CRS.from_wkt(g.attrs["crs_wkt"]),
        ),
        nodata=(g.attrs.get("no_data_value")),
        overviews=overviews,
        overview_resampling=overview_resampling,
        expand_valid_data=expand_valid_data,
        file_id=file_id,
    )
def heads_to_raster(
    headsfile,
    rasterfolder,
    transform,
    noflow=1e30,
    driver='GTiff',
    epsg=28992,
):
    # create output folder
    rasterfolder.mkdir(exist_ok=True)

    hds = flopy.utils.binaryfile.HeadFile(headsfile)
    heads = np.ma.masked_equal(hds.get_data(), noflow)
    for layer, iheads in enumerate(heads[::2]):
        log.debug('exporting heads layer {layer:d}'.format(layer=layer + 1))
        rasterfile = rasterfolder / 'heads_l{layer:02d}.tif'.format(
            layer=layer + 1,
        )
        width, height = iheads.shape
        profile = {
            'driver': driver,
            'width': width,
            'height': height,
            'count': 1,
            'transform': Affine.from_gdal(*transform),
            'dtype': iheads.dtype,
        }
        write_raster(rasterfile, iheads, profile)
def test_rasterio_handles():
    """
    Test to create a file handle(s) depending on whether
    the output data has a color layer or not.
    """
    bounds = (675248.0, 4897075.0, 675460.5, 4897173.0)
    resolution = 0.5
    geotransform = (bounds[0], resolution, 0.0, bounds[3], 0.0, -resolution)
    transform = Affine.from_gdal(*geotransform)
    rio_params = dict(
        height=196, width=425, driver='GTiff', dtype=np.float32,
        transform=transform, crs='EPSG:{}'.format(32631), tiled=True
    )
    dsm_no_data = -32768
    color_no_data = 0
    nb_bands = 1

    # Create file handles
    with tempfile.TemporaryDirectory(dir=temporary_dir()) as directory:
        dsm_file = os.path.join(directory, 'dsm.tif')
        clr_file = os.path.join(directory, 'clr.tif')
        file_handles = readwrite.rasterio_handles(
            ['hgt', 'clr'], [dsm_file, clr_file], [rio_params, rio_params],
            [dsm_no_data, color_no_data], [1, nb_bands])
        with file_handles as rio_handles:
            assert isinstance(rio_handles, dict)
            assert 'hgt' in rio_handles.keys() and 'clr' in rio_handles.keys()
            for key in rio_handles.keys():
                assert isinstance(rio_handles[key], rio.io.DatasetWriter)
def test_data_dir_1(tmpdir):
    kwargs = {
        "crs": {"init": "epsg:4326"},
        "transform": Affine.from_gdal(-114, 0.2, 0, 46, 0, -0.2),
        "count": 4,
        "dtype": rasterio.uint8,
        "driver": "GTiff",
        "width": 10,
        "height": 10,
    }

    with rasterio.Env():
        with rasterio.open(str(tmpdir.join("b.tif")), "w", **kwargs) as dst:
            data = numpy.zeros((4, 10, 10), dtype=rasterio.uint8)
            data[0:3, 0:6, 0:6] = 255
            data[3, 0:6, 0:6] = 255
            dst.write(data)
        with rasterio.open(str(tmpdir.join("a.tif")), "w", **kwargs) as dst:
            data = numpy.zeros((4, 10, 10), dtype=rasterio.uint8)
            data[0:3, 4:8, 4:8] = 254
            data[3, 4:8, 4:8] = 255
            dst.write(data)

    return tmpdir
def compute_stats(image, geoms):
    # Get Affine object in order to run zonal_stats
    aff = Affine.from_gdal(*image.GetGeoTransform())

    # Extract values
    values = image.ReadAsArray()

    # Get no-data value
    nodata = image.GetRasterBand(1).GetNoDataValue()

    # Create WKT representation for each polygon
    wkts = []
    for g in geoms:
        wkts.append(g.ExportToWkt())

    # Compute stats for each polygon
    stats = zonal_stats(wkts, values,
                        stats=['min', 'max', 'count', 'std'],
                        affine=aff, nodata=nodata, raster_out=False,
                        prefix='', add_stats={'model': model},
                        geojson_out=True)

    return stats
def convert_coordinates(geotransform, xy, to_map=True, centre=False):
    """
    Given a tuple containing an (x, y) co-ordinate pair, convert
    the co-ordinate pair to either image/array co-ordinates or
    real world (map) co-ordinates.

    :param geotransform:
        A list or tuple of length 6 containing a valid GDAL style
        GeoTransform.

    :param xy:
        A tuple containing an (x, y) co-ordinate pair. The pair
        can be either image/array co-ordinates or map
        co-ordinates. If xy is a list of tuple co-ordinate pairs,
        then each (x, y) pair will be converted, eg
        [(x, y), (x, y), (x, y)].
        If image co-ordinates are input, then set to_map=True. If
        map co-ordinates are input, then set to_map=False.

    :param to_map:
        A boolean indicating if the conversion should be image to
        map or map to image. Default is True (image to map).

    :param centre:
        A boolean indicating if the returned co-ordinate pair
        should be offset by 0.5 indicating the centre of a pixel.
        Default is False.

    :return:
        A tuple containing an (x, y) co-ordinate pair. The
        returned type will be int if to_map=False and float if
        to_map=True (Default). If xy is a list of tuple
        co-ordinate pairs, then a list of (x, y) co-ordinate
        pairs will be returned, eg [(x, y), (x, y), (x, y)].
    """
    # define the affine transformation
    affine = Affine.from_gdal(*geotransform)

    # If we have a list of tuples otherwise we'll get an int
    # (collections.Sequence moved to collections.abc in Python 3)
    if isinstance(xy[0], collections.abc.Sequence):
        points = []
        if to_map:
            if centre:
                xy = [(x + 0.5, y + 0.5) for x, y in xy]
            for point in xy:
                xy = point * affine
                points.append(xy)
        else:
            for point in xy:
                x, y = point * ~affine
                points.append((int(x), int(y)))
        return points
    else:
        if to_map:
            if centre:
                xy = tuple(v + 0.5 for v in xy)
            x, y = xy * affine
        else:
            xy = xy * ~affine
            x, y = tuple(int(v) for v in xy)
        return x, y
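# A hedged round-trip sketch for convert_coordinates. Note the function uses
# the vector-times-matrix form (point * affine), which older releases of the
# affine package support via a deprecated __rmul__; the geotransform values
# are illustrative.
_gt = (-114.0, 0.2, 0.0, 46.0, 0.0, -0.2)
_map_xy = convert_coordinates(_gt, (10, 20), to_map=True, centre=True)
assert convert_coordinates(_gt, _map_xy, to_map=False) == (10, 20)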
def clip_land_use_raster(land_use_raster, region_shapefile, output_file):
    with rasterio.open(land_use_raster) as r:
        with fiona.open(region_shapefile) as clipper:
            (w, s, e, n) = clipper.bounds
            a = r.affine
            # TODO: need to transform the affine for new clipping
            (min_col, min_row) = map(int, ~a * (w, n))
            (max_col, max_row) = map(int, ~a * (e, s))
            w2, n2 = a * (min_col, min_row)
            new_affine = Affine.from_gdal(w2, 100, 0.0, n2, 0.0, -100)

            (height, width) = r.read(
                1, window=((min_row, max_row), (min_col, max_col))).shape

            profile = r.profile
            profile.update({
                'transform': new_affine,
                'affine': new_affine,
                'height': height,
                'width': width
            })

            with rasterio.open(output_file, 'w', **profile) as out:
                for i in r.indexes:
                    clipped = r.read(
                        i, window=((min_row, max_row), (min_col, max_col)))
                    # print clipped.shape
                    out.write(clipped, indexes=i)
def to_geojson(self, projection, geotransform):
    fwd = Affine.from_gdal(*geotransform)
    min_x_min_y = fwd * (self.min_x, self.min_y)
    max_x_min_y = fwd * (self.max_x, self.min_y)
    max_x_max_y = fwd * (self.max_x, self.max_y)
    min_x_max_y = fwd * (self.min_x, self.max_y)
    coordinates = [
        min_x_min_y, max_x_min_y, max_x_max_y, min_x_max_y, min_x_min_y
    ]

    src_proj = osr.SpatialReference()
    src_proj.ImportFromWkt(projection)
    dst_proj = osr.SpatialReference()
    dst_proj.ImportFromEPSG(4326)
    coord_trans = osr.CoordinateTransformation(src_proj, dst_proj)
    coordinates = [
        coord_trans.TransformPoint(coord[0], coord[1], 0.0)[:2]
        for coord in coordinates
    ]
    return Polygon(coordinates=coordinates)
def _rasterise_gdf(dissolved):
    """Rasterise the GeoDataFrame dissolved.

    :param dissolved: A GeoDataFrame in which there is one row for each
        desired range map.
    """
    with open(SCI_NAME_RASTER_FILENAME_MAPPING_FP, 'a', newline='') as snrfmf:
        snrfmf_writer = csv.writer(snrfmf)
        for row in dissolved.itertuples():
            sisid_str = str(row.SISID)
            breeding_str = str(row.BREEDING)
            uncompressed_filename = map_sisid_breeding_to_filename(
                sisid_str, breeding_str, True)
            uncompressed_file_path = os.path.join(RASTER_DIR_PATH,
                                                  uncompressed_filename)
            least_longitude = row.geometry.bounds[0]
            least_latitude = row.geometry.bounds[1]
            longitude_range = row.geometry.bounds[2] - row.geometry.bounds[0]
            latitude_range = row.geometry.bounds[3] - row.geometry.bounds[1]
            pixel_width_float = float(Fraction(PIXEL_WIDTH_STR))
            pixel_height_float = float(Fraction(PIXEL_HEIGHT_STR))
            width = ceil(longitude_range / pixel_width_float)
            height = ceil(latitude_range / pixel_height_float)
            geotransform = (least_longitude, pixel_width_float, 0.0,
                            least_latitude, 0.0, pixel_height_float)
            transform = Affine.from_gdal(*geotransform)
            print_w_timestamp('Generating %s...' % uncompressed_filename,
                              end=' ')
            _generate_raster(uncompressed_file_path, width, height, transform,
                             row.geometry)
            print('Done.')
            compressed_filename = map_sisid_breeding_to_filename(
                sisid_str, breeding_str, False)
            compressed_file_path = os.path.join(RASTER_DIR_PATH,
                                                compressed_filename)
            print_w_timestamp('Compressing...', end=' ')
            _compress_raster(uncompressed_file_path, compressed_file_path)
            print('Done.')
            # Delete uncompressed raster.
            os.remove(uncompressed_file_path)
            # Delete ".tif.aux.xml" file.
            xml_file_path = compressed_file_path + '.aux.xml'
            os.remove(xml_file_path)
            # At this point, I assert that a GeoTIFF has been generated and
            # compressed successfully. Therefore, a mapping is added.
            sci_name = str(row.SCINAME)
            snrfmf_writer.writerow((sci_name, compressed_filename))
def check_vals(data, affine, points=TEST_POINTS):
    inv_a = Affine.from_gdal(*gdal.InvGeoTransform(affine.to_gdal()))
    for (coords, val) in points:
        [col_f, row_f] = inv_a * coords[::-1]
        [col, row] = [int(col_f), int(row_f)]
        assert data[row, col] == val
def __init__(self):
    self.p = pyproj.Proj(self._p_str)
    self.gt = (self._x0, self._res, 0, self._y0, 0, -self._res)
    self.fwd = Affine.from_gdal(*self.gt)
    self.rev = ~self.fwd  # to get coords
    self.x_size = self._res * self._shape[1]
    self.y_size = self._res * self._shape[0]
def write_geotiff(self, data_array, file_name):
    assert data_array.ndim == 2
    _args = dict(driver='GTiff',
                 height=data_array.shape[0],
                 width=data_array.shape[1],
                 count=1,
                 dtype=data_array.dtype.name,
                 crs=data_array.attrs['crs'],
                 transform=Affine.from_gdal(*data_array.attrs['gt']))
    with rio.open(file_name, 'w', **_args) as geotif:
        geotif.write(data_array.values, 1)
def transform(self):
    transform = [self.header['TL'][0], self.header['RES'][0],
                 self.header['ROT'][0], self.header['TL'][1],
                 self.header['ROT'][1], self.header['RES'][1]]
    return Affine.from_gdal(*transform)
def test_wrf_project(wrf):
    """Test project wrf grid"""
    with wrf.xd as xd:
        pgrid = xd.lsm.to_utm('RAINC')

        # make sure coordinates correct
        assert 'lat' in pgrid.coords
        assert 'lon' in pgrid.coords
        assert 'time' in pgrid.coords

        # check @property attributes
        date_array = [
            '2016-08-23 22:00:00', '2016-08-23 23:00:00',
            '2016-08-24 00:00:00', '2016-08-24 01:00:00',
            '2016-08-24 02:00:00', '2016-08-24 03:00:00',
            '2016-08-24 04:00:00', '2016-08-24 05:00:00',
            '2016-08-24 06:00:00', '2016-08-24 07:00:00',
            '2016-08-24 08:00:00', '2016-08-24 09:00:00',
            '2016-08-24 10:00:00', '2016-08-24 11:00:00',
            '2016-08-24 12:00:00', '2016-08-24 13:00:00'
        ]
        assert (pgrid.lsm.datetime == pd.to_datetime(date_array)).all()

        # check projection
        proj4_str = ('+proj=utm +zone=13 +datum=WGS84 +units=m +no_defs ')
        compare_proj4(pgrid.lsm.projection.ExportToProj4(), proj4_str)

        # check other attrs
        assert pgrid.lsm.epsg == '32613'
        assert_almost_equal(pgrid.lsm.geotransform,
                            [-529776.2885911233, 6010.014137057385, 0.0,
                             4558039.843039687, 0.0, -6010.014137057385],
                            decimal=3)
        assert_almost_equal(pgrid.lsm.dx, 6010.014137057385)
        assert_almost_equal(pgrid.lsm.dy, 6010.014137057385)
        assert pgrid.lsm.affine == Affine.from_gdal(*pgrid.lsm.geotransform)
        assert pgrid.lsm.x_size == 291
        assert pgrid.lsm.y_size == 230
        lat, lon = pgrid.lsm.latlon
        assert lat.shape == (230, 291)
        assert lon.shape == (230, 291)
        assert_almost_equal(lat[20:23, 145:148],
                            [[40.0494547, 40.0505404, 40.0515833],
                             [39.9953332, 39.9964168, 39.9974578],
                             [39.9412112, 39.9422928, 39.9433317]])
        assert_almost_equal(lon[144:147, 15:17],
                            [[-114.9990177, -114.9356954],
                             [-114.9929548, -114.9296693],
                             [-114.9869079, -114.9236591]])
        y_coords, x_coords = pgrid.lsm.coords
        assert y_coords.shape == (230, 291)
        assert x_coords.shape == (230, 291)
        assert_almost_equal(x_coords[100:102, 220:223],
                            [[795431.8286, 801441.8428, 807451.8569],
                             [795431.8286, 801441.8428, 807451.8569]],
                            decimal=4)
        assert_almost_equal(y_coords[100:102, 220:223],
                            [[3954033.4223, 3954033.4223, 3954033.4223],
                             [3948023.4081, 3948023.4081, 3948023.4081]],
                            decimal=4)
        assert_almost_equal(pgrid.lsm.center, [-106.6965833, 34.8059311])
def get_affine(src):
    aff = None
    # See https://github.com/mapbox/rasterio/issues/86
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        aff = src.transform
    if isinstance(aff, list):
        aff = Affine.from_gdal(*aff)
    return aff
def load_grid(self):
    from tools4msp.models import PartracGrid
    r = PartracGrid.objects.all()[0]
    grid = r.rast.bands[0].data()
    grid[:] = 0
    gtransform = Affine.from_gdal(*r.rast.geotransform)
    proj = r.rast.srs.srid
    self.grid = rg.RectifiedGrid(grid, proj, gtransform)
def test_convert():
    col, row = 0, 100
    affine_params = Affine.from_gdal(
        *(-237481.5, 425.0, 0.0, 237536.4, 0.0, -425.0))
    assert (-237481.5, 195036.4) == rowcol2xy((row, col), affine_params)
    assert (row, col) == xy2rowcol((-237481.5, 195036.4), affine_params,
                                   round_function=round)
def concatenate_files(
    infile_paths: Iterable[str],
    outfile: str,
    h5_info: Dict,
    doy: str,
    tile_metadata: Dict,
) -> None:
    """
    A function to concatenate multiple h5 files and append
    metadata information.
    """
    assert len(infile_paths) == 7

    # set the uuids used in processing average for given day of year
    tile_metadata["lineage"]["doy_average"] = [
        munge_metadata(h5_info[key]) for key in h5_info
        if folder_doy(key) == doy
    ]

    # Set deterministic UUID from lineage
    tile_metadata["id"] = str(
        uuid.uuid5(
            FALLBACK_NAMESPACE,
            FALLBACK_PRODUCT_HREF + "&" +
            urllib.parse.urlencode(tile_metadata["lineage"]),
        ))

    geom_mask = None
    transform = None
    with atomic_h5_write(Path(outfile), "w") as out_fid:
        # Sorting works since No. of bands < 10
        # note that 3 hdf5 datasets will show consecutively per band
        for fp in sorted(infile_paths):
            with h5py.File(fp, "r") as in_fid:
                for ds_band in in_fid:
                    if "BRDF_Albedo_Parameters_" in ds_band:
                        _band = in_fid[ds_band]
                        if not transform:
                            transform = Affine.from_gdal(
                                *_band.attrs["geotransform"])
                        nodata_value = _band.attrs["_FillValue"]
                        albedo_params = _band[()]
                        if geom_mask is not None:
                            geom_mask |= np.logical_or(
                                *((albedo_params[layer] != nodata_value)
                                  for layer in DTYPE_MAIN.names))
                        else:
                            geom_mask = np.logical_or(
                                *((albedo_params[layer] != nodata_value)
                                  for layer in DTYPE_MAIN.names))
                    in_fid.copy(source=ds_band, dest=out_fid)

        # Calculate valid bounds from source ISO, VOL, GEO params
        tile_metadata["geometry"] = _calculate_valid_bounds(
            geom_mask, transform)

        # Write out metadata
        write_h5_md(out_fid, [tile_metadata], ["/"])
def retrieve_pixel_coords(geo_coord, geot_params):
    x, y = geo_coord[0], geo_coord[1]
    forward_transform = Affine.from_gdal(*geot_params)
    reverse_transform = ~forward_transform
    px, py = reverse_transform * (x, y)
    px = np.around(px).astype(int)
    py = np.around(py).astype(int)
    pixel_coord = px, py
    return pixel_coord
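# A quick, hedged check of retrieve_pixel_coords using a GDAL-order
# geotransform (x0, dx, 0, y0, 0, dy) with illustrative values: the raster
# origin maps to pixel (0, 0) and one x-resolution step maps to column 1.
_gt = (-237481.5, 425.0, 0.0, 237536.4, 0.0, -425.0)
assert retrieve_pixel_coords((-237481.5, 237536.4), _gt) == (0, 0)
assert retrieve_pixel_coords((-237481.5 + 425.0, 237536.4), _gt) == (1, 0)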
def rasterize_geom(geom, src_offset, new_gt, all_touched):
    geoms = [(geom, 1)]
    affinetrans = Affine.from_gdal(*new_gt)
    rv_array = features.rasterize(
        geoms,
        out_shape=(src_offset[3], src_offset[2]),
        transform=affinetrans,
        fill=0,
        all_touched=all_touched)
    return rv_array
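# A hedged usage sketch for rasterize_geom: src_offset is assumed to be a
# window tuple whose last two entries give (cols, rows) for the output shape,
# new_gt a GDAL-order geotransform for that window, and `features` the
# rasterio.features module imported by the surrounding code. The geometry and
# grid values are illustrative.
from shapely.geometry import box

_mask = rasterize_geom(box(-113.8, 45.6, -113.4, 45.9),
                       (0, 0, 10, 10),
                       (-114.0, 0.2, 0.0, 46.0, 0.0, -0.2),
                       all_touched=True)
assert _mask.shape == (10, 10) and _mask.any()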
def test_gdal():
    t = Affine.from_gdal(-237481.5, 425.0, 0.0, 237536.4, 0.0, -425.0)
    assert t.c == t.xoff == -237481.5
    assert t.a == 425.0
    assert t.b == 0.0
    assert t.f == t.yoff == 237536.4
    assert t.d == 0.0
    assert t.e == -425.0
    assert tuple(t) == (425.0, 0.0, -237481.5,
                        0.0, -425.0, 237536.4,
                        0, 0, 1.0)
    assert t.to_gdal() == (-237481.5, 425.0, 0.0, 237536.4, 0.0, -425.0)
def test_ndarray_affine():
    polygons = os.path.join(DATA, 'polygons.shp')
    arr, gt = _get_raster_array_gt(raster)
    stats1 = zonal_stats(polygons, arr, transform=gt)

    from affine import Affine
    atrans = Affine.from_gdal(*gt)
    stats2 = zonal_stats(polygons, arr, transform=atrans)
    assert stats1[0]['count'] == stats2[0]['count']

    stats3 = zonal_stats(polygons, arr, affine=gt)
    assert stats1[0]['count'] == stats3[0]['count']
def guard_transform(transform):
    """Return an Affine transformation instance"""
    if not isinstance(transform, Affine):
        if tastes_like_gdal(transform):
            warnings.warn(
                "GDAL-style transforms are deprecated and will not "
                "be supported in Rasterio 1.0.",
                FutureWarning,
                stacklevel=2)
            transform = Affine.from_gdal(*transform)
        else:
            transform = Affine(*transform)
    return transform
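# A hedged sketch of the two inputs guard_transform accepts, assuming the
# tastes_like_gdal() heuristic recognises the origin-first GDAL ordering.
# Both tuples below spell the same transform (cf. the from_gdal/to_gdal
# round trip in test_gdal above), so the guarded results compare equal.
_gdal_style = (-237481.5, 425.0, 0.0, 237536.4, 0.0, -425.0)
_affine_style = (425.0, 0.0, -237481.5, 0.0, -425.0, 237536.4)
assert guard_transform(_gdal_style) == guard_transform(_affine_style)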
def Raster_to_Polygon(input_file):
    gdal.UseExceptions()
    src_ds = gdal.Open(input_file, GA_ReadOnly)
    print(input_file)
    # target = osr.SpatialReference()
    wkt = src_ds.GetProjection()
    print(wkt)
    src = osr.SpatialReference()
    print(src)
    ds = src.ImportFromWkt(wkt)
    # srcband = src_ds.GetRasterBand(1)
    myarray = (src_ds.GetRasterBand(1).ReadAsArray())
    # print(myarray)
    T0 = Affine.from_gdal(*src_ds.GetGeoTransform())
    tx = [T0[0], T0[1], T0[2], T0[3], T0[4], T0[5]]
    print(tx)
    epsg_code = []
    # if (src.IsProjected()):
    #     ds = epsg_code.append(int(src.GetAuthorityCode("PROJCS")))
    #     print(ds)
    # else:
    #     epsg_code.append(int(src.GetAuthorityCode("GEOGCS")))
    target = osr.SpatialReference()
    target.ImportFromEPSG(102003)

    if src_ds is None:
        # print 'Unable to open %s' % src_filename
        sys.exit(1)

    try:
        srcband = src_ds.GetRasterBand(1)
        srd = srcband.GetMaskBand()
    except RuntimeError as e:
        # for example, try GetRasterBand(10)
        # print 'Band ( %i ) not found' % band_num
        # print e
        sys.exit(1)

    drv = ogr.GetDriverByName("ESRI Shapefile")
    if os.path.exists('temp.shp'):
        drv.DeleteDataSource('temp.shp')
    dst_layername = 'temp'
    dst_ds = drv.CreateDataSource('temp' + ".shp")
    print(dst_ds)
    dst_layer = dst_ds.CreateLayer(dst_layername, srs=target)

    gdal.Polygonize(srcband, srd, dst_layer, -1, [], callback=None)
    src_ds = None
def _geofactory(bounds, proj, res, dtype=np.float64, eea=False):
    if eea:
        gbounds = calculate_eea_gbounds(bounds, res)
    else:
        gbounds = calculate_gbounds(bounds, res)

    cols = int(round((gbounds[2] - gbounds[0]) / res))
    rows = int(round((gbounds[3] - gbounds[1]) / res))
    _gtransform = (gbounds[0], res, 0.0, gbounds[3], 0.0, -res)
    gtransform = Affine.from_gdal(*_gtransform)
    # we use copy=True in order to avoid sharedmask=True
    return RectifiedGrid(np.zeros((rows, cols), dtype), proj, gtransform)
def makeCoverageGrid(covshp, geodict):
    shapes = fiona.open(covshp)
    geoms = []
    for shape in shapes:
        geoms.append(shape['geometry'])
    shapes.close()
    outshape = (geodict['nrows'], geodict['ncols'])
    transform = Affine.from_gdal(geodict['xmin'], geodict['xdim'], 0.0,
                                 geodict['ymax'], 0.0, -geodict['ydim'])
    img = features.rasterize(geoms, out_shape=outshape, fill=0,
                             transform=transform, all_touched=True,
                             default_value=1)
    covgrid = GMTGrid()
    covgrid.geodict = geodict
    covgrid.griddata = np.int8(img.copy())
    return covgrid
def get_metadata(self):
    '''
    return a rasterio-style metadata dict for the output file to be created.
    Only GeoTiff is currently supported.
    '''
    from affine import Affine as A
    meta = {'affine': A.from_gdal(*self.geotransform),
            'count': 1,
            'crs': self.crs,
            'driver': u'GTiff',
            'dtype': 'uint8',
            'height': self.height,
            'nodata': self.nodata,
            'width': self.width,
            'compress': 'lzw'}
    return meta
def guard_transform(transform):
    """Return an Affine transformation instance"""
    if not isinstance(transform, Affine):
        if tastes_like_gdal(transform):
            warnings.warn(
                "GDAL-style transforms are deprecated and will not "
                "be supported in Rasterio 1.0.",
                FutureWarning,
                stacklevel=2)
            transform = Affine.from_gdal(*transform)
        else:
            transform = Affine(*transform)
    a, e = transform.a, transform.e
    if a == 0.0 or e == 0.0:
        raise ValueError(
            "Transform has invalid coefficients a, e: (%f, %f)" % (
                transform.a, transform.e))
    return transform
def Extract_Soil_Texture2(self):
    import numpy as np
    from affine import Affine

    lon_cell_size = abs(self.lon_grid[0] - self.lon_grid[1])
    lat_cell_size = abs(self.lat_grid[0] - self.lat_grid[1])

    min_lon = min(self.lon_grid) - lon_cell_size/2.0*0.
    min_lat = min(self.lat_grid) - lat_cell_size/2.0*0.

    n_lat = np.size(self.lat)
    n_lon = np.size(self.lon)

    aff = Affine.from_gdal(min_lon, lon_cell_size, 0.0,
                           min_lat, 0.0, lat_cell_size)

    lon = np.reshape(np.repeat(self.lon, n_lat), (n_lon, n_lat))
    lat = np.transpose(np.reshape(np.repeat(self.lat, n_lon), (n_lat, n_lon)))

    x_coords, y_coords = ~aff * (lon, lat)
    # np.int is a deprecated alias of the builtin int
    x_coords = np.round(x_coords).astype(int)
    y_coords = np.round(y_coords).astype(int)

    if np.size(x_coords) >= 1 and np.size(y_coords) >= 1:
        clay_perc0 = self.Clay_percent[y_coords, x_coords]
        sand_perc0 = self.Sand_percent[y_coords, x_coords]
        silt_perc0 = self.Silt_percent[y_coords, x_coords]
        peat_perc0 = self.Peat_percent[y_coords, x_coords]

        clay_perc = np.transpose(np.reshape(clay_perc0, (n_lon, n_lat)))
        sand_perc = np.transpose(np.reshape(sand_perc0, (n_lon, n_lat)))
        silt_perc = np.transpose(np.reshape(silt_perc0, (n_lon, n_lat)))
        peat_perc = np.transpose(np.reshape(peat_perc0, (n_lon, n_lat)))
    else:
        clay_perc = np.nan
        sand_perc = np.nan
        silt_perc = np.nan
        peat_perc = np.nan

    return clay_perc, sand_perc, silt_perc, peat_perc
def test_data_dir_3(tmpdir):
    kwargs = {
        "crs": {'init': 'epsg:4326'},
        "transform": Affine.from_gdal(-114, 0.1, 0, 46, 0, -0.1),
        "count": 4,
        "dtype": rasterio.uint8,
        "driver": "GTiff",
        "width": 32,
        "height": 32,
        "compress": "JPEG"
    }

    with rasterio.Env():
        with rasterio.open(str(tmpdir.join('a.tif')), 'w', **kwargs) as dst:
            data = numpy.ones((4, 32, 32), dtype=rasterio.uint8)
            data[3, :, :] = 255
            dst.write(data)

    return tmpdir
def test_reproject():
    from rasterio.warp import reproject
    from rasterio.enums import Resampling

    with rasterio.Env():
        # As source: a 1024 x 1024 raster centered on 0 degrees E and 0
        # degrees N, each pixel covering 15".
        rows, cols = src_shape = (1024, 1024)
        # decimal degrees per pixel
        d = 1.0 / 240
        # The following is equivalent to
        # A(d, 0, -cols*d/2, 0, -d, rows*d/2).
        src_transform = rasterio.Affine.translation(
            -cols*d/2, rows*d/2) * rasterio.Affine.scale(d, -d)
        src_crs = {'init': 'EPSG:4326'}
        source = np.ones(src_shape, np.uint8) * 255

        # Destination: a 2048 x 2048 dataset in Web Mercator (EPSG:3857)
        # with origin at 0.0, 0.0.
        dst_shape = (2048, 2048)
        dst_transform = Affine.from_gdal(
            -237481.5, 425.0, 0.0, 237536.4, 0.0, -425.0)
        dst_crs = {'init': 'EPSG:3857'}
        destination = np.zeros(dst_shape, np.uint8)

        reproject(
            source,
            destination,
            src_transform=src_transform,
            src_crs=src_crs,
            dst_transform=dst_transform,
            dst_crs=dst_crs,
            resampling=Resampling.nearest)

        # Assert that the destination is only partly filled.
        assert destination.any()
        assert not destination.all()
def extract_point_from_raster(point, data_source, band_number=1):
    """Return floating-point value that corresponds to given point."""

    # Convert point co-ordinates so that they are in same projection as raster
    point_sr = point.GetSpatialReference()
    raster_sr = osr.SpatialReference()
    raster_sr.ImportFromWkt(data_source.GetProjection())
    transform = osr.CoordinateTransformation(point_sr, raster_sr)
    point.Transform(transform)

    # Convert geographic co-ordinates to pixel co-ordinates
    x, y = point.GetX(), point.GetY()
    forward_transform = Affine.from_gdal(*data_source.GetGeoTransform())
    reverse_transform = ~forward_transform
    px, py = reverse_transform * (x, y)
    px, py = int(px + 0.5), int(py + 0.5)

    # Extract pixel value
    band = data_source.GetRasterBand(band_number)
    structval = band.ReadRaster(px, py, 1, 1, buf_type=gdal.GDT_Float32)
    result = struct.unpack('f', structval)[0]
    if result == band.GetNoDataValue():
        result = float('nan')
    return result
def project(self, projection, method='bilinear'):
    """Project Grid2D data into desired projection.

    :param projection: Valid proj4 projection string.
    :param method: One of the sampling methods described here:
        https://mapbox.github.io/rasterio/topics/resampling.html#resampling-methods
    :raises DataSetException: If input projection is not a valid Proj4 string.
        If method is not a valid resampling method found in above URL.
    :returns: Re-projected Grid2D object.
    """
    # check to see if the input projection is valid
    srs = osr.SpatialReference()
    srs.ImportFromProj4(projection)
    if srs.ExportToProj4() == '':
        raise DataSetException('%s is not a valid proj4 string.' % projection)

    # check to see if the input resampling method is valid
    int_method = 1  # bi-linear
    try:
        int_method = getattr(Resampling, method)
    except AttributeError as ae:
        raise DataSetException('%s is not a valid resampling method.' %
                               method)

    with Env():
        # get the dimensions of the input data
        nrows, ncols = src_shape = self._data.shape
        # define the input Affine object
        src_transform = Affine.from_gdal(
            self._geodict.xmin - self._geodict.dx/2.0,
            self._geodict.dx,
            0.0,  # x rotation, not used by us
            self._geodict.ymax + self._geodict.dy/2.0,
            0.0,  # y rotation, not used by us
            -1*self._geodict.dy)  # their dy is negative

        # set the source and destination projections
        # (have to be CRS dictionaries)
        src_crs = CRS().from_string(self._geodict.projection).to_dict()
        dst_crs = CRS().from_string(projection).to_dict()

        # determine the boundaries in src coordinates
        if self._geodict.xmin < self._geodict.xmax:
            right = self._geodict.xmax - (self._geodict.dx/2.0)
        else:
            txmax = self._geodict.xmax + 360
            right = txmax - (self._geodict.dx/2.0)
        left = self._geodict.xmin - (self._geodict.dx/2.0)
        top = self._geodict.ymax + (self._geodict.dy/2.0)
        bottom = self._geodict.ymin + (self._geodict.dy/2.0)

        # use this convenience function to determine optimal output
        # transform and dimensions
        dst_transform, width, height = calculate_default_transform(
            src_crs, dst_crs, ncols, nrows, left, bottom, right, top)

        # allocate space for output data (very C-like)
        destination = np.zeros((height, width))

        # if the input has nan values, then tell reproject about that
        # and set the output to that value as well
        src_nan = None
        dst_nan = None
        if np.any(np.isnan(self._data)):
            src_nan = np.nan
            dst_nan = np.nan
        if self._data.dtype in (np.float32, np.float64):
            src_nan = np.nan
            dst_nan = np.nan

        # call the reproject function
        reproject(
            self._data,
            destination,
            src_transform=src_transform,
            src_crs=src_crs,
            dst_transform=dst_transform,
            src_nodata=src_nan,
            dst_nodata=dst_nan,
            dst_crs=projection,
            resampling=int_method)

        # get the pieces of the output transformation
        xmin, dx, xrot, ymax, yrot, mdy = dst_transform.to_gdal()
        # affine dy is negative, so we have to flip it back
        dy = -1*mdy
        # correct for different pixel offsets
        xmin = xmin + (dx/2.0)
        ymax = ymax - (dy/2.0)

        # Construct a new GeoDict
        gdict = {'xmin': xmin,
                 'xmax': xmin + width*dx,
                 'ymin': ymax - height*dy,
                 'ymax': ymax,
                 'dx': dx,
                 'dy': dy,
                 'nx': width,
                 'ny': height,
                 'projection': projection}
        geodict = GeoDict(gdict, adjust='bounds')

    # Make a new Grid2D object and return it
    newgrid = Grid2D(destination, geodict)
    return newgrid
def run():
    # TODO - update at each use
    # drive_path = os.path.join('/', 'Volumes', 'SeagateExpansionDrive', )
    # tiff_path = os.path.join(drive_path, "jan_metric", 'for_stacking',
    #                          'aligned_nlcd_full_warp_near_clip_3336.tif')
    # stack_location = os.path.join(drive_path, "jan_metric_PHX_GR",
    #                               'green_river_stack',
    #                               'stack_20150728_ETrF_NDVI')
    drive_path = os.path.join('/', 'Users', 'Gabe', 'Desktop',
                              'juliet_problem')
    # tiff_path = os.path.join(drive_path, "jan_metric", 'for_stacking',
    #                          'aligned_nlcd_full_warp_near_clip_3336.tif')
    stack_location = os.path.join(drive_path, 'juliet_stack')

    #### find the right window to use.
    # First get the minimum raster extent.
    comparison_list = []
    comparison_dict = {}
    for directory_path, subdir, file in os.walk(stack_location,
                                                topdown=False):
        for tf in file:
            if tf.endswith(".tif"):
                tiff_path = os.path.join(directory_path, tf)
                with rasterio.open(tiff_path) as src:
                    ras = src.read(1)
                    # raster.shape -> (###, ###)
                    # raster.shape[1]  raster.shape[0]
                    comparison_list.append(ras.shape[0] * ras.shape[1])
                    comparison_dict["{}".format(
                        ras.shape[0] * ras.shape[1])] = tiff_path

    # get the minimum dimensions raster.
    val = min(comparison_list)
    min_raster_path = comparison_dict["{}".format(val)]
    print(min_raster_path)

    with rasterio.open(min_raster_path) as raster:
        ras = raster.read(1)
        print('ras shape 0', ras.shape[0])
        print('ras shape 1', ras.shape[1])
        window = ((0, ras.shape[0]), (0, ras.shape[1]))
        print("WINDOW", window)
        bounds = raster.window_bounds(window)
        print("BOUNDS", bounds)

    # Take the bounds from the minimum raster and for each raster in the dir,
    # get the correct window to be read in for the dict using the bounds from
    # the min raster.
    raster_dict = {}
    window_lst = []
    for directory_path, subdir, file in os.walk(stack_location,
                                                topdown=False):
        for tf in file:
            if tf.endswith(".tif"):
                tiff_path = os.path.join(directory_path, tf)
                with rasterio.open(tiff_path) as r:
                    T0 = r.affine  # upper-left pixel corner affine transform
                    print("Here is T0", T0)
                    window = r.window(*bounds)
                    print("edited window", window)
                    top_left = [bounds[0], bounds[-1]]
                    print("Here is top left", top_left)
                    print('r.window', r.window)
                    A = r.read(1, window=window)
                    print("A", A)
                    print("A shape", A.shape)

                    # top left x and top left y coord from bounds of window
                    tlx = bounds[0]
                    tly = bounds[-1]
                    geotransform = (tlx, T0[0], 0.0, tly, 0.0, T0[4])
                    # make an Affine transformation matrix out of the
                    # geotransform
                    fwd = Affine.from_gdal(*geotransform)
                    # use the affine matrix to shift half a raster over.
                    T1 = fwd * Affine.translation(0.5, 0.5)
                    print(" The new T1 ", T1)
                    # make a grid to hold the columns and rows based on the
                    # shape of the raster you read in.
                    col, row = np.meshgrid(np.arange(A.shape[1]),
                                           np.arange(A.shape[0]))
                    # convert col and row to UTM centroids using translated
                    # affine matrix
                    x, y = T1 * (col, row)
                    # add the rasters to a raster dict.
                    raster_dict['{}'.format(tf.split(".")[0])] = (A,
                                                                  tiff_path)
                    # make the list of northings y and eastings x
                    x_list = x.ravel().tolist()
                    y_list = y.ravel().tolist()

    print("Starting the formatter")
    # add the raster dict and the list of x and y coordinates separately.
    df = data_frame_formatter(x_list, y_list, raster_dict)

    # graph ETrF vs NDVI and ideal ETrF vs NDVI
    grapher(df)
def rasterizeFromGeometry(cls, shapes, samplegeodict, burnValue=1.0,
                          fillValue=np.nan, allTouched=True, attribute=None):
    """
    Create a Grid2D object from vector shapes, where the presence of a shape
    (point, line, polygon) inside a cell turns that cell "on".

    :param shapes:
        One of:
        - One shapely geometry object (Point, Polygon, etc.) or a sequence
          of such objects
        - One GeoJSON like object or sequence of such objects.
          (http://geojson.org/)
        - A tuple of (geometry,value) or sequence of (geometry,value).
    :param samplegeodict:
        GeoDict with at least xmin,xmax,ymin,ymax,xdim,ydim values set.
    :param burnValue:
        Optional value which will be used to set the value of the pixels if
        there is no value in the geometry field.
    :param fillValue:
        Optional value which will be used to fill the cells not touched by
        any geometry.
    :param allTouched:
        Optional boolean which indicates whether the geometry must touch the
        center of the cell or merely be inside the cell in order to set the
        value.
    :raises DataSetException:
        When geometry input is not a subclass of
        shapely.geometry.base.BaseGeometry.
    :returns: Grid2D object.

    This method is a thin wrapper around rasterio->features->rasterize(),
    documented here:
    https://github.com/mapbox/rasterio/blob/master/docs/features.rst

    which is itself a Python wrapper around the functionality found in
    gdal_rasterize, documented here:
    http://www.gdal.org/gdal_rasterize.html
    """
    # check the type of shapes
    # features.rasterize() documentation says this:
    # iterable of (geometry, value) pairs or iterable over
    # geometries. `geometry` can either be an object that implements
    # the geo interface or GeoJSON-like object.

    # figure out whether this is a single shape or a sequence of shapes
    isGeoJSON = False
    isGeometry = False
    isSequence = False
    isTuple = False
    if hasattr(shapes, '__iter__'):
        if isinstance(shapes[0], tuple):
            isTuple = True
    isOk = False
    isShape = False
    if isinstance(shapes, shapely.geometry.base.BaseGeometry):
        isOk = True
        isShape = True
    elif len(shapes) and isinstance(shapes[0],
                                    shapely.geometry.base.BaseGeometry):
        isOk = True
        isShape = True
    elif isinstance(shapes, dict) and shapes.has_key('geometry') \
            and shapes.has_key('properties'):
        isOk = True
    elif len(shapes) and isinstance(shapes[0], dict) \
            and shapes[0].has_key('geometry') \
            and shapes[0].has_key('properties'):
        isOk = True
    else:
        pass
    if not isOk:
        raise DataSetException('shapes must be a single shapely object or '
                               'sequence of them, or single Geo-JSON '
                               'like-object')

    if not isShape:
        shapes2 = []
        for shape in shapes:
            geometry = shape['geometry']
            props = shape['properties']
            if attribute is not None:
                if not props.has_key(attribute):
                    raise DataSetException(
                        'Input shapes do not have attribute "%s".' %
                        attribute)
                value = props[attribute]
                if not isinstance(value, (int, float, long)):
                    raise DataSetException(
                        'value from input shapes object is not a number')
            else:
                value = burnValue
            shapes2.append((geometry, value))
        shapes = shapes2

    xmin, xmax, ymin, ymax = (samplegeodict['xmin'], samplegeodict['xmax'],
                              samplegeodict['ymin'], samplegeodict['ymax'])
    xdim, ydim = (samplegeodict['xdim'], samplegeodict['ydim'])

    xvar = np.arange(xmin, xmax+xdim, xdim)
    yvar = np.arange(ymin, ymax+ydim, ydim)
    ncols = len(xvar)
    nrows = len(yvar)

    # the rasterize function assumes a pixel registered data set, where we
    # are grid registered. In order to make this work we need to adjust the
    # edges of our grid out by half a cell width in each direction.
    txmin = xmin - xdim/2.0
    tymax = ymax + ydim/2.0

    outshape = (nrows, ncols)
    transform = Affine.from_gdal(txmin, xdim, 0.0, tymax, 0.0, -ydim)
    img = features.rasterize(shapes, out_shape=outshape, fill=fillValue,
                             transform=transform, all_touched=allTouched,
                             default_value=burnValue)

    geodict = {'xmin': xmin, 'xmax': xmax, 'ymin': ymin, 'ymax': ymax,
               'xdim': xdim, 'ydim': ydim, 'nrows': nrows, 'ncols': ncols}
    return cls(img, geodict)
# Pixel size in the web mercator projection
pixel_width = abs((top_left_web[0] - bottom_right_web[0]) / img_width)
pixel_height = abs((top_left_web[1] - bottom_right_web[1]) / img_height)

# Gdal Transform matrix
geotransform = (
    top_left_web[0],  # top left corner x
    pixel_width,      # pixel width
    0.0,              # rotation about y-axis
    top_left_web[1],  # top left corner y
    0.0,              # rotation about x-axis
    -pixel_height,    # pixel height.
)
pix2coord = Affine.from_gdal(*geotransform)
coord2pix = ~pix2coord


def wgs2pix(x, y):
    """
    Gets WGS84 coordinates and transforms them to
    x,y pixel indices in the target image
    """
    # Reproject WGS84 coords to web mercator
    web_xy = pyproj.transform(wgs, web, x, y)
    # Transform web mercator into pixel indexes
    pix_x, pix_y = coord2pix * web_xy
    # Round the pixel indexes
    return int(pix_x), int(pix_y)
def gen_zonal_stats(
        vectors, raster,
        layer=0, band=1, nodata=None,
        affine=None, stats=None,
        all_touched=False, categorical=False, category_map=None,
        add_stats=None, zone_func=None, raster_out=False,
        prefix=None, geojson_out=False, **kwargs):
    """Zonal statistics of raster values aggregated to vector geometries.

    Parameters
    ----------
    vectors: path to an vector source or geo-like python objects
    raster: ndarray or path to a GDAL raster source
        If ndarray is passed, the ``affine`` kwarg is required.
    layer: int or string, optional
        If `vectors` is a path to an fiona source,
        specify the vector layer to use either by name or number.
        defaults to 0
    band: int, optional
        If `raster` is a GDAL source, the band number to use
        (counting from 1). defaults to 1.
    nodata: float, optional
        If `raster` is a GDAL source, this value overrides any NODATA value
        specified in the file's metadata.
        If `None`, the file's metadata's NODATA value (if any) will be used.
        defaults to `None`.
    affine: Affine instance
        required only for ndarrays, otherwise it is read from src
    stats: list of str, or space-delimited str, optional
        Which statistics to calculate for each zone.
        All possible choices are listed in ``utils.VALID_STATS``.
        defaults to ``DEFAULT_STATS``, a subset of these.
    all_touched: bool, optional
        Whether to include every raster cell touched by a geometry, or only
        those having a center point within the polygon.
        defaults to `False`
    categorical: bool, optional
    category_map: dict
        A dictionary mapping raster values to human-readable categorical
        names. Only applies when categorical is True
    add_stats: dict
        with names and functions of additional stats to compute, optional
    zone_func: callable
        function to apply to zone ndarray prior to computing stats
    raster_out: boolean
        Include the masked numpy array for each feature?, optional
        Each feature dictionary will have the following additional keys:
        mini_raster_array: The clipped and masked numpy array
        mini_raster_affine: Affine transformation
        mini_raster_nodata: NoData Value
    prefix: string
        add a prefix to the keys (default: None)
    geojson_out: boolean
        Return list of GeoJSON-like features (default: False)
        Original feature geometry and properties will be retained
        with zonal stats appended as additional properties.
        Use with `prefix` to ensure unique and meaningful property names.

    Returns
    -------
    generator of dicts (if geojson_out is False)
        Each item corresponds to a single vector feature and
        contains keys for each of the specified stats.
    generator of geojson features (if geojson_out is True)
        GeoJSON-like Feature as python dict
    """
    stats, run_count = check_stats(stats, categorical)

    # Handle 1.0 deprecations
    transform = kwargs.get('transform')
    if transform:
        warnings.warn("GDAL-style transforms will disappear in 1.0. "
                      "Use affine=Affine.from_gdal(*transform) instead",
                      DeprecationWarning)
        if not affine:
            affine = Affine.from_gdal(*transform)

    cp = kwargs.get('copy_properties')
    if cp:
        warnings.warn("Use `geojson_out` to preserve feature properties",
                      DeprecationWarning)

    bn = kwargs.get('band_num')
    if bn:
        warnings.warn("Use `band` to specify band number",
                      DeprecationWarning)
        band = bn

    with Raster(raster, affine, nodata, band) as rast:
        features_iter = read_features(vectors, layer)
        for _, feat in enumerate(features_iter):
            geom = shape(feat['geometry'])

            if 'Point' in geom.type:
                geom = boxify_points(geom, rast)

            geom_bounds = tuple(geom.bounds)

            fsrc = rast.read(bounds=geom_bounds)

            # rasterized geometry
            rv_array = rasterize_geom(geom, like=fsrc,
                                      all_touched=all_touched)

            # nodata mask
            isnodata = (fsrc.array == fsrc.nodata)

            # add nan mask (if necessary)
            if np.issubdtype(fsrc.array.dtype, float) and \
                    np.isnan(fsrc.array.min()):
                isnodata = (isnodata | np.isnan(fsrc.array))

            # Mask the source data array
            # mask everything that is not a valid value or not within
            # our geom
            masked = np.ma.MaskedArray(
                fsrc.array,
                mask=(isnodata | ~rv_array))

            # execute zone_func on masked zone ndarray
            if zone_func is not None:
                if not callable(zone_func):
                    raise TypeError(('zone_func must be a callable '
                                     'which accepts function a '
                                     'single `zone_array` arg.'))
                zone_func(masked)

            if masked.compressed().size == 0:
                # nothing here, fill with None and move on
                feature_stats = dict([(stat, None) for stat in stats])
                if 'count' in stats:  # special case, zero makes sense here
                    feature_stats['count'] = 0
            else:
                if run_count:
                    keys, counts = np.unique(masked.compressed(),
                                             return_counts=True)
                    pixel_count = dict(
                        zip([np.asscalar(k) for k in keys],
                            [np.asscalar(c) for c in counts]))

                if categorical:
                    feature_stats = dict(pixel_count)
                    if category_map:
                        feature_stats = remap_categories(category_map,
                                                         feature_stats)
                else:
                    feature_stats = {}

                if 'min' in stats:
                    feature_stats['min'] = float(masked.min())
                if 'max' in stats:
                    feature_stats['max'] = float(masked.max())
                if 'mean' in stats:
                    feature_stats['mean'] = float(masked.mean())
                if 'count' in stats:
                    feature_stats['count'] = int(masked.count())
                # optional
                if 'sum' in stats:
                    feature_stats['sum'] = float(masked.sum())
                if 'std' in stats:
                    feature_stats['std'] = float(masked.std())
                if 'median' in stats:
                    feature_stats['median'] = float(
                        np.median(masked.compressed()))
                if 'majority' in stats:
                    feature_stats['majority'] = float(
                        key_assoc_val(pixel_count, max))
                if 'minority' in stats:
                    feature_stats['minority'] = float(
                        key_assoc_val(pixel_count, min))
                if 'unique' in stats:
                    feature_stats['unique'] = len(list(pixel_count.keys()))
                if 'range' in stats:
                    try:
                        rmin = feature_stats['min']
                    except KeyError:
                        rmin = float(masked.min())
                    try:
                        rmax = feature_stats['max']
                    except KeyError:
                        rmax = float(masked.max())
                    feature_stats['range'] = rmax - rmin

                for pctile in [s for s in stats
                               if s.startswith('percentile_')]:
                    q = get_percentile(pctile)
                    pctarr = masked.compressed()
                    feature_stats[pctile] = np.percentile(pctarr, q)

            if 'nodata' in stats:
                featmasked = np.ma.MaskedArray(
                    fsrc.array, mask=np.logical_not(rv_array))
                feature_stats['nodata'] = float(
                    (featmasked == fsrc.nodata).sum())

            if add_stats is not None:
                for stat_name, stat_func in add_stats.items():
                    feature_stats[stat_name] = stat_func(masked)

            if raster_out:
                feature_stats['mini_raster_array'] = masked
                feature_stats['mini_raster_affine'] = fsrc.affine
                feature_stats['mini_raster_nodata'] = fsrc.nodata

            if prefix is not None:
                prefixed_feature_stats = {}
                for key, val in feature_stats.items():
                    newkey = "{}{}".format(prefix, key)
                    prefixed_feature_stats[newkey] = val
                feature_stats = prefixed_feature_stats

            if geojson_out:
                for key, val in feature_stats.items():
                    if 'properties' not in feat:
                        feat['properties'] = {}
                    feat['properties'][key] = val
                yield feat
            else:
                yield feature_stats
def rasterizeFromGeometry(cls, shapes, geodict, burnValue=1.0,
                          fillValue=np.nan, mustContainCenter=False,
                          attribute=None):
    """
    Create a Grid2D object from vector shapes, where the presence of a shape
    (point, line, polygon) inside a cell turns that cell "on".

    :param shapes:
        One of:
        - One shapely geometry object (Point, Polygon, etc.) or a sequence
          of such objects
        - One GeoJSON like object or sequence of such objects.
          (http://geojson.org/)
        - A tuple of (geometry,value) or sequence of (geometry,value).
    :param geodict:
        GeoDict object which defines the grid onto which the shape values
        should be "burned".
    :param burnValue:
        Optional value which will be used to set the value of the pixels if
        there is no value in the geometry field.
    :param fillValue:
        Optional value which will be used to fill the cells not touched by
        any geometry.
    :param mustContainCenter:
        Optional boolean which indicates whether the geometry must touch the
        center of the cell or merely be inside the cell in order to set the
        value.
    :raises DataSetException:
        When geometry input is not a subclass of
        shapely.geometry.base.BaseGeometry.
    :returns: Grid2D object.

    This method is a thin wrapper around rasterio->features->rasterize(),
    documented here:
    https://github.com/mapbox/rasterio/blob/master/docs/features.rst

    which is itself a Python wrapper around the functionality found in
    gdal_rasterize, documented here:
    http://www.gdal.org/gdal_rasterize.html
    """
    # check the type of shapes
    # features.rasterize() documentation says this:
    # iterable of (geometry, value) pairs or iterable over
    # geometries. `geometry` can either be an object that implements
    # the geo interface or GeoJSON-like object.

    # create list of allowable types
    if sys.version_info.major == 2:
        types = (int, float, long)
    else:
        types = (int, float)

    # figure out whether this is a single shape or a sequence of shapes
    isGeoJSON = False
    isGeometry = False
    isSequence = False
    isTuple = False
    if hasattr(shapes, '__iter__'):
        if isinstance(shapes[0], tuple):
            isTuple = True
    isOk = False
    isShape = False
    if isinstance(shapes, shapely.geometry.base.BaseGeometry):
        isOk = True
        isShape = True
    elif len(shapes) and isinstance(shapes[0],
                                    shapely.geometry.base.BaseGeometry):
        isOk = True
        isShape = True
    elif isinstance(shapes, dict) and 'geometry' in shapes \
            and 'properties' in shapes:
        isOk = True
    elif len(shapes) and isinstance(shapes[0], dict) \
            and 'geometry' in shapes[0] and 'properties' in shapes[0]:
        isOk = True
    else:
        pass
    if not isOk:
        raise DataSetException('shapes must be a single shapely object or '
                               'sequence of them, or single Geo-JSON '
                               'like-object')

    if not isShape:
        shapes2 = []
        for shape in shapes:
            geometry = shape['geometry']
            props = shape['properties']
            if attribute is not None:
                if not attribute in props:
                    raise DataSetException(
                        'Input shapes do not have attribute "%s".' %
                        attribute)
                value = props[attribute]
                if not isinstance(value, types):
                    raise DataSetException(
                        'value from input shapes object is not a number')
            else:
                value = burnValue
            shapes2.append((geometry, value))
        shapes = shapes2

    xmin, xmax, ymin, ymax = (geodict.xmin, geodict.xmax,
                              geodict.ymin, geodict.ymax)
    dx, dy = (geodict.dx, geodict.dy)

    if xmax < xmin:
        xmax += 360

    xvar = np.arange(xmin, xmax+(dx*0.1), dx)
    yvar = np.arange(ymin, ymax+(dy*0.1), dy)
    nx = len(xvar)
    ny = len(yvar)

    # the rasterize function assumes a pixel registered data set, where we
    # are grid registered. In order to make this work we need to adjust the
    # edges of our grid out by half a cell width in each direction.
    txmin = xmin - dx/2.0
    tymax = ymax + dy/2.0

    outshape = (ny, nx)
    transform = Affine.from_gdal(txmin, dx, 0.0, tymax, 0.0, -dy)
    allTouched = not mustContainCenter
    img = features.rasterize(shapes, out_shape=outshape, fill=fillValue,
                             transform=transform, all_touched=allTouched,
                             default_value=burnValue)

    # geodict = GeoDict({'xmin':xmin,'xmax':xmax,'ymin':ymin,'ymax':ymax,
    #                    'dx':dx,'dy':dy,'ny':ny,'nx':nx})
    # gd = geodict.asDict()
    # ny,nx = img.shape
    # gd['nx'] = nx
    # gd['ny'] = ny
    # geodict = GeoDict(gd,adjust='bounds')
    return cls(img, geodict)
from __future__ import absolute_import

import numpy as np
import rectifiedgrid as rg
from affine import Affine
from shapely import geometry

epsg = 3035
gtransform = Affine.from_gdal(4500000.0, 10000.0, 0.0,
                              1560000.0, 0.0, -10000.0)

arr1 = np.zeros((9, 9), np.float64)
arr1[:3, :3] = np.array([[-1, -2, -3],
                         [0, -1, -2],
                         [0, 0, -1]])
arr1[4:8, 4:8] = np.array([[1, 2, 3, 4],
                           [1, 2, 3, 4],
                           [1, 2, 3, 4],
                           [1, 2, 3, 4]])

arr2 = np.zeros((9, 9), np.float64)
arr2[4, 4] = 1

arr3 = np.zeros((9, 9), np.float64)
arr3[0, 0] = 1

lstring = geometry.LineString(((12.0, 36.0), (13.0, 37.0)))


def get_demo_data(name='default'):
import logging
import sys

import pytest
from affine import Affine
import numpy
import rasterio
from rasterio.warp import (
    reproject, RESAMPLING, transform_geom, transform, transform_bounds,
    calculate_default_transform)

logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)

DST_TRANSFORM = Affine.from_gdal(-8789636.708, 300.0, 0.0, 2943560.235, 0.0, -300.0)


class ReprojectParams(object):
    """Class to assist testing reprojection by encapsulating parameters."""

    def __init__(self, left, bottom, right, top, width, height, src_crs,
                 dst_crs):
        self.width = width
        self.height = height
        src_res = float(right - left) / float(width)
        self.src_transform = Affine(src_res, 0, left, 0, -src_res, top)
        self.src_crs = src_crs
        self.dst_crs = dst_crs

        with rasterio.drivers():
            dt, dw, dh = calculate_default_transform(
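# --- Hedged example (not from the source) ---------------------------------
# Sketch of how calculate_default_transform is typically called in this era
# of rasterio (rasterio.drivers() and dict-style CRS; the bounds below are
# illustrative). It returns the destination affine plus suggested width and
# height, the same (dt, dw, dh) triple unpacked in the truncated __init__
# above.
import rasterio
from rasterio.warp import calculate_default_transform

with rasterio.drivers():  # newer rasterio would use rasterio.Env()
    dt, dw, dh = calculate_default_transform(
        {'init': 'EPSG:4326'},       # src_crs
        {'init': 'EPSG:3857'},       # dst_crs
        100, 100,                    # source width, height
        -120.0, 30.0, -110.0, 40.0)  # left, bottom, right, top
    print(dt, dw, dh)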
def gen_zonal_stats(
        vectors, raster,
        layer=0,
        band_num=1,
        nodata=None,
        affine=None,
        stats=None,
        all_touched=False,
        percent_cover_scale=None,
        percent_cover_selection=None,
        percent_cover_weighting=False,
        latitude_correction=False,
        categorical=False,
        category_map=None,
        add_stats=None,
        raster_out=False,
        prefix=None,
        save_properties=False,
        geojson_out=False, **kwargs):
    """Zonal statistics of raster values aggregated to vector geometries.

    Parameters
    ----------
    vectors: path to a vector source or geo-like python objects
    raster: ndarray or path to a GDAL raster source
        If ndarray is passed, the ``affine`` kwarg is required.
    layer: int or string, optional
        If `vectors` is a path to a fiona source, specify the vector layer
        to use either by name or number. defaults to 0
    band_num: int, optional
        If `raster` is a GDAL source, the band number to use
        (counting from 1). defaults to 1.
    nodata: float, optional
        If `raster` is a GDAL source, this value overrides any NODATA value
        specified in the file's metadata. If `None`, the file's metadata's
        NODATA value (if any) will be used. defaults to `None`.
    affine: Affine instance
        required only for ndarrays, otherwise it is read from src
    stats: list of str, or space-delimited str, optional
        Which statistics to calculate for each zone. All possible choices
        are listed in ``utils.VALID_STATS``. defaults to ``DEFAULT_STATS``,
        a subset of these.
    all_touched: bool, optional
        Whether to include every raster cell touched by a geometry, or only
        those having a center point within the polygon. defaults to `False`
    percent_cover_scale: int, optional
        Scale used when generating percent coverage estimates of each
        raster cell by vector feature. Percent coverage is generated by
        rasterizing the feature at a finer resolution than the raster
        (based on the percent_cover_scale value), then using a summation to
        aggregate to the raster resolution and dividing by the square of
        percent_cover_scale to get the percent coverage value for each
        cell. Increasing percent_cover_scale will increase the accuracy of
        percent coverage values; three orders of magnitude finer resolution
        (percent_cover_scale=1000) is usually enough to get coverage
        estimates with <1% error in individual edge cells, though much
        smaller values (e.g., percent_cover_scale=10) are often sufficient
        (<10% error) and require less memory.
    percent_cover_selection: float, optional
        Include only raster cells that have at least the given percent
        covered by the vector feature. Requires the percent_cover_scale
        argument be used to specify the scale at which to generate percent
        coverage estimates.
    percent_cover_weighting: bool, optional
        Whether to weight cell values by the percent of each cell covered
        by the vector feature when computing supported stats (mean, count,
        sum). Requires the percent_cover_scale argument.
    latitude_correction: bool, optional
        * For use with WGS84 raster data only.
        * Only applies to the "mean" stat.
        Weights cell values when generating statistics based on latitude
        (using the haversine function) in order to account for the actual
        area represented by a pixel cell.
    categorical: bool, optional
    category_map: dict
        A dictionary mapping raster values to human-readable categorical
        names. Only applies when categorical is True.
    add_stats: dict
        with names and functions of additional stats to compute, optional
    raster_out: boolean
        Include the masked numpy array for each feature?, optional
        Each feature dictionary will have the following additional keys:
        mini_raster_array: The clipped and masked numpy array
        mini_raster_affine: Affine transformation
        mini_raster_nodata: NoData Value
    prefix: string
        add a prefix to the keys (default: None)
    save_properties: boolean
        Returns original features along with specified stats when
        geojson_out is set to False.
    geojson_out: boolean
        Return list of GeoJSON-like features (default: False)
        Original feature geometry and properties will be retained with
        zonal stats appended as additional properties.
        Use with `prefix` to ensure unique and meaningful property names.

    Returns
    -------
    generator of dicts (if geojson_out is False)
        Each item corresponds to a single vector feature and contains keys
        for each of the specified stats. If save_properties is True, also
        contains the original properties.
    generator of geojson features (if geojson_out is True)
        GeoJSON-like Feature as python dict
    """
    stats, run_count = check_stats(stats, categorical)

    # Handle 1.0 deprecations
    transform = kwargs.get('transform')
    if transform:
        warnings.warn("GDAL-style transforms will disappear in 1.0. "
                      "Use affine=Affine.from_gdal(*transform) instead",
                      DeprecationWarning)
        if not affine:
            affine = Affine.from_gdal(*transform)

    ndv = kwargs.get('nodata_value')
    if ndv:
        warnings.warn("Use `nodata` instead of `nodata_value`", DeprecationWarning)
        if not nodata:
            nodata = ndv

    cp = kwargs.get('copy_properties')
    if cp:
        warnings.warn("Use `geojson_out` or `save_properties` to preserve "
                      "feature properties", DeprecationWarning)

    valid_percent_cover_stat = any(i in stats for i in ['mean', 'count', 'sum'])
    if percent_cover_weighting and not percent_cover_selection and not valid_percent_cover_stat:
        warnings.warn('`percent_cover_weighting` option selected but no stats '
                      'which can use percent cover were selected')
        percent_cover_weighting = False

    percent_cover = False
    if percent_cover_weighting or percent_cover_selection is not None:
        percent_cover = True
        if percent_cover_scale is None:
            raise Exception('Value for `percent_cover_scale` must be provided in order '
                            'to use percent_cover_weighting or percent_cover_selection')
        try:
            if percent_cover_scale != int(percent_cover_scale):
                warnings.warn('Value for `percent_cover_scale` given ({0}) '
                              'was converted to int ({1}) but does not '
                              'match original value'.format(
                                  percent_cover_scale, int(percent_cover_scale)))
            percent_cover_scale = int(percent_cover_scale)
        except (TypeError, ValueError):
            raise Exception('Invalid value for `percent_cover_scale` provided '
                            '({0}). Must be type int.'.format(percent_cover_scale))
        # check the range outside the try block so this error is not
        # swallowed and re-raised with the wrong message
        if percent_cover_scale <= 1:
            raise Exception('Value for `percent_cover_scale` must be greater than one '
                            '({0})'.format(percent_cover_scale))

        if percent_cover_selection is not None:
            try:
                percent_cover_selection = float(percent_cover_selection)
            except (TypeError, ValueError):
                raise Exception('Invalid value for `percent_cover_selection` provided '
                                '({0}). Must be type float.'.format(percent_cover_selection))

    with Raster(raster, affine, nodata, band_num) as rast:
        features_iter = read_features(vectors, layer)
        for i, feat in enumerate(features_iter):
            geom = shape(feat['geometry'])
            feature_stats = {}

            # percent cover is not meaningful for (boxified) point
            # geometries; disable it per feature instead of mutating the
            # loop-wide flag
            feat_percent_cover = percent_cover
            if 'Point' in geom.type:
                feat_percent_cover = False
                geom = boxify_points(geom, rast)

            geom_bounds = tuple(geom.bounds)

            try:
                fsrc = rast.read(bounds=geom_bounds)
                fsrc_nodata = copy(fsrc.nodata)
                fsrc_affine = copy(fsrc.affine)
                fsrc_shape = copy(fsrc.shape)
            except MemoryError:
                print("Memory Error (fsrc):\n", feat['properties'])
                continue

            if feat_percent_cover:
                try:
                    # rv_array holds per-cell coverage fractions (0..1)
                    rv_array = rasterize_pctcover_geom(
                        geom, shape=fsrc_shape, affine=fsrc_affine,
                        scale=percent_cover_scale, all_touched=all_touched)
                    assert rv_array.shape == fsrc_shape
                except MemoryError:
                    print("Memory Error (percent_cover rv_array):\n", feat['properties'])
                    continue
            else:
                try:
                    # create ndarray of rasterized geometry
                    rv_array = rasterize_geom(
                        geom, shape=fsrc_shape, affine=fsrc_affine,
                        all_touched=all_touched)
                    assert rv_array.shape == fsrc_shape
                except MemoryError:
                    print("Memory Error (standard rv_array):\n", feat['properties'])
                    continue

            try:
                # Mask the source data array with our current feature.
                # We take the logical_not to flip 0<->1 for the correct mask
                # effect; we also mask out nodata values explicitly.
                if percent_cover_selection is not None:
                    # additionally mask out cells whose coverage fraction
                    # falls below the selection threshold
                    masked = np.ma.MaskedArray(
                        fsrc.array,
                        mask=np.logical_or(
                            np.logical_or(fsrc.array == fsrc_nodata,
                                          np.logical_not(rv_array)),
                            rv_array < percent_cover_selection))
                else:
                    masked = np.ma.MaskedArray(
                        fsrc.array,
                        mask=np.logical_or(fsrc.array == fsrc_nodata,
                                           np.logical_not(rv_array)))
            except MemoryError:
                print("Memory Error (masked):\n", feat['properties'])
                continue

            if latitude_correction and 'mean' in stats:
                # fsrc_affine[5] is the top y coordinate and fsrc_affine[0]
                # the cell size (square cells assumed), giving each row's
                # center latitude
                latitude_scale = [
                    get_latitude_scale(fsrc_affine[5] - fsrc_affine[0] * (0.5 + i))
                    for i in range(fsrc_shape[0])
                ]

            if 'nodata' in stats:
                featmasked = np.ma.MaskedArray(fsrc.array, mask=np.logical_not(rv_array))
                feature_stats['nodata'] = float((featmasked == fsrc_nodata).sum())

            del fsrc
            if not percent_cover_weighting:
                del rv_array

            try:
                compressed = masked.compressed()
            except MemoryError:
                print("Memory Error (compressed):\n", feat['properties'])
                continue

            if len(compressed) == 0:
                # nothing here, fill with None and move on
                feature_stats = dict([(stat, None) for stat in stats])
                if 'count' in stats:  # special case, zero makes sense here
                    feature_stats['count'] = 0
            else:
                if run_count:
                    keys, counts = np.unique(compressed, return_counts=True)
                    pixel_count = dict(zip([np.asscalar(k) for k in keys],
                                           [np.asscalar(c) for c in counts]))

                if categorical:
                    feature_stats = dict(pixel_count)
                    if category_map:
                        feature_stats = remap_categories(category_map, feature_stats)

                if 'mean' in stats:
                    if percent_cover_weighting and latitude_correction:
                        feature_stats['mean'] = float(
                            np.sum((masked.T * latitude_scale).T * rv_array) /
                            np.sum((~masked.mask.T * latitude_scale).T * rv_array))
                    elif percent_cover_weighting:
                        feature_stats['mean'] = float(
                            np.sum(masked * rv_array) /
                            np.sum(~masked.mask * rv_array))
                    elif latitude_correction:
                        feature_stats['mean'] = float(
                            np.sum((masked.T * latitude_scale).T) /
                            np.sum(latitude_scale *
                                   (masked.shape[1] - np.sum(masked.mask, axis=1))))
                    else:
                        feature_stats['mean'] = float(compressed.mean())

                if 'count' in stats:
                    if percent_cover_weighting:
                        feature_stats['count'] = float(np.sum(~masked.mask * rv_array))
                    else:
                        feature_stats['count'] = int(len(compressed))

                if 'sum' in stats:
                    if percent_cover_weighting:
                        feature_stats['sum'] = float(np.sum(masked * rv_array))
                    else:
                        feature_stats['sum'] = float(compressed.sum())

                if 'min' in stats:
                    feature_stats['min'] = float(compressed.min())
                if 'max' in stats:
                    feature_stats['max'] = float(compressed.max())
                if 'std' in stats:
                    feature_stats['std'] = float(compressed.std())
                if 'median' in stats:
                    feature_stats['median'] = float(np.median(compressed))
                if 'majority' in stats:
                    feature_stats['majority'] = float(key_assoc_val(pixel_count, max))
                if 'minority' in stats:
                    feature_stats['minority'] = float(key_assoc_val(pixel_count, min))
                if 'unique' in stats:
                    feature_stats['unique'] = len(list(pixel_count.keys()))
                if 'range' in stats:
                    try:
                        rmin = feature_stats['min']
                    except KeyError:
                        rmin = float(compressed.min())
                    try:
                        rmax = feature_stats['max']
                    except KeyError:
                        rmax = float(compressed.max())
                    feature_stats['range'] = rmax - rmin

                for pctile in [s for s in stats if s.startswith('percentile_')]:
                    q = get_percentile(pctile)
                    feature_stats[pctile] = np.percentile(compressed, q)

            if add_stats is not None:
                for stat_name, stat_func in add_stats.items():
                    feature_stats[stat_name] = stat_func(masked)

            if raster_out:
                feature_stats['mini_raster_array'] = masked
                feature_stats['mini_raster_affine'] = fsrc_affine
                feature_stats['mini_raster_nodata'] = fsrc_nodata

            if prefix is not None:
                prefixed_feature_stats = {}
                for key, val in feature_stats.items():
                    newkey = "{}{}".format(prefix, key)
                    prefixed_feature_stats[newkey] = val
                feature_stats = prefixed_feature_stats

            if geojson_out or save_properties:
                for key, val in feature_stats.items():
                    if 'properties' not in feat:
                        feat['properties'] = {}
                    feat['properties'][key] = val
                if geojson_out:
                    yield feat
                else:
                    yield feat['properties']
            else:
                yield feature_stats
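# --- Hedged usage sketch (not from the source) -----------------------------
# Driving the generator above with an in-memory array: `affine` is required
# for ndarrays, and the GeoJSON-like feature below is an illustrative input.
import numpy as np
from affine import Affine

data = np.arange(100, dtype='float64').reshape(10, 10)
affine = Affine.from_gdal(-10.0, 1.0, 0.0, 10.0, 0.0, -1.0)
polygon = {
    'type': 'Feature',
    'properties': {'name': 'test'},
    'geometry': {
        'type': 'Polygon',
        'coordinates': [[(-9.5, 9.5), (-5.5, 9.5), (-5.5, 5.5),
                         (-9.5, 5.5), (-9.5, 9.5)]]},
}
for result in gen_zonal_stats([polygon], data, affine=affine,
                              stats=['mean', 'count', 'sum']):
    print(result)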
def warp(self, dem=None, proj="EPSG:4326", **kwargs):
    """Delayed warp across an entire AOI or Image.

    Creates a new dask image by deferring calls to the warp_geometry on chunks.

    Args:
        dem (ndarray): optional. A DEM for warping to specific elevation planes
        proj (str): optional. An EPSG proj string to project the image data into ("EPSG:32612")

    Returns:
        daskarray: a warped image as deferred image array
    """
    try:
        img_md = self.rda.metadata["image"]
        x_size = img_md["tileXSize"]
        y_size = img_md["tileYSize"]
    except (AttributeError, KeyError):
        x_size = kwargs.get("chunk_size", 256)
        y_size = kwargs.get("chunk_size", 256)

    # Create an affine transform to convert between real-world and pixels
    if self.proj is None:
        from_proj = "EPSG:4326"
    else:
        from_proj = self.proj

    try:
        # NOTE: this only works on images that have rda rpcs metadata
        center = wkt.loads(self.rda.metadata["image"]["imageBoundsWGS84"]).centroid
        g = box(*(center.buffer(self.rda.metadata["rpcs"]["gsd"] / 2).bounds))
        tfm = partial(pyproj.transform, pyproj.Proj(init="EPSG:4326"), pyproj.Proj(init=proj))
        gsd = kwargs.get("gsd", ops.transform(tfm, g).area ** 0.5)
        current_bounds = wkt.loads(self.rda.metadata["image"]["imageBoundsWGS84"]).bounds
    except (AttributeError, KeyError, TypeError):
        tfm = partial(pyproj.transform, pyproj.Proj(init=self.proj), pyproj.Proj(init=proj))
        gsd = kwargs.get("gsd", (ops.transform(tfm, shape(self)).area /
                                 (self.shape[1] * self.shape[2])) ** 0.5)
        current_bounds = self.bounds

    tfm = partial(pyproj.transform, pyproj.Proj(init=from_proj), pyproj.Proj(init=proj))
    itfm = partial(pyproj.transform, pyproj.Proj(init=proj), pyproj.Proj(init=from_proj))
    output_bounds = ops.transform(tfm, box(*current_bounds)).bounds
    gtf = Affine.from_gdal(output_bounds[0], gsd, 0.0, output_bounds[3], 0.0, -1 * gsd)

    ll = ~gtf * (output_bounds[:2])
    ur = ~gtf * (output_bounds[2:])
    x_chunks = int((ur[0] - ll[0]) / x_size) + 1
    y_chunks = int((ll[1] - ur[1]) / y_size) + 1
    num_bands = self.shape[0]

    try:
        dtype = RDA_TO_DTYPE[img_md["dataType"]]
    except (NameError, KeyError):
        dtype = 'uint8'

    daskmeta = {
        "dask": {},
        "chunks": (num_bands, y_size, x_size),
        "dtype": dtype,
        "name": "warp-{}".format(self.name),
        "shape": (num_bands, y_chunks * y_size, x_chunks * x_size)
    }

    def px_to_geom(xmin, ymin):
        xmax = int(xmin + x_size)
        ymax = int(ymin + y_size)
        bounds = list((gtf * (xmin, ymax)) + (gtf * (xmax, ymin)))
        return box(*bounds)

    full_bounds = box(*output_bounds)

    dasks = []
    if isinstance(dem, GeoDaskImage):
        if dem.proj != proj:
            dem = dem.warp(proj=proj, dem=dem)
        dasks.append(dem.dask)

    for y in range(y_chunks):
        for x in range(x_chunks):
            xmin = x * x_size
            ymin = y * y_size
            geometry = px_to_geom(xmin, ymin)
            daskmeta["dask"][(daskmeta["name"], 0, y, x)] = (self._warp, geometry, gsd, dem, proj, dtype, 5)
    daskmeta["dask"], _ = optimization.cull(
        HighLevelGraph.merge(daskmeta["dask"], *dasks),
        list(daskmeta["dask"].keys()))

    gi = mapping(full_bounds)
    gt = AffineTransform(gtf, proj)
    image = GeoDaskImage(daskmeta, __geo_interface__=gi, __geo_transform__=gt)
    return image[box(*output_bounds)]
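# --- Hedged example (not from the source) ---------------------------------
# Standalone illustration (with made-up bounds and GSD) of how the inverse
# affine above turns projected bounds into a pixel-space chunk grid.
from affine import Affine

gsd = 30.0
output_bounds = (500000.0, 4100000.0, 530000.0, 4130000.0)  # minx, miny, maxx, maxy
gtf = Affine.from_gdal(output_bounds[0], gsd, 0.0, output_bounds[3], 0.0, -gsd)

ll = ~gtf * output_bounds[:2]  # lower-left corner in pixel coordinates
ur = ~gtf * output_bounds[2:]  # upper-right corner in pixel coordinates
x_size = y_size = 256
x_chunks = int((ur[0] - ll[0]) / x_size) + 1
y_chunks = int((ll[1] - ur[1]) / y_size) + 1
print(x_chunks, y_chunks)  # 4 4: a 1000x1000-pixel extent tiled in 256-pixel chunks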
def from_xarray(da, crs=None, apply_transform=False, nan_nodata=False, **kwargs):
    """
    Returns an RGB or Image element given an xarray DataArray loaded using
    xr.open_rasterio.

    If a crs attribute is present on the loaded data it will attempt to
    decode it into a cartopy projection, otherwise it will default to a
    non-geographic HoloViews element.

    Parameters
    ----------
    da: xarray.DataArray
        DataArray to convert to element
    crs: Cartopy CRS or EPSG string (optional)
        Overrides CRS inferred from the data
    apply_transform: boolean
        Whether to apply affine transform if defined on the data
    nan_nodata: boolean
        If data contains nodata values convert them to NaNs
    **kwargs:
        Keyword arguments passed to the HoloViews/GeoViews element

    Returns
    -------
    element: Image/RGB/QuadMesh element
    """
    if crs:
        kwargs['crs'] = crs
    elif hasattr(da, 'crs'):
        try:
            kwargs['crs'] = process_crs(da.crs)
        except Exception:
            param.main.warning('Could not decode projection from crs string %r, '
                               'defaulting to non-geographic element.' % da.crs)

    coords = list(da.coords)
    if coords not in (['band', 'y', 'x'], ['y', 'x']):
        from .element.geo import Dataset, HvDataset
        el = Dataset if 'crs' in kwargs else HvDataset
        return el(da, **kwargs)

    if len(coords) == 2:
        y, x = coords
        bands = 1
    else:
        y, x = coords[1:]
        bands = len(da.coords[coords[0]])

    if apply_transform:
        from affine import Affine
        transform = Affine.from_gdal(*da.attrs['transform'][:6])
        nx, ny = da.sizes[x], da.sizes[y]
        xs, ys = np.meshgrid(np.arange(nx) + 0.5, np.arange(ny) + 0.5) * transform
        data = (xs, ys)
    else:
        xres, yres = da.attrs['res'] if 'res' in da.attrs else (1, 1)
        xs = da.coords[x][::-1] if xres < 0 else da.coords[x]
        ys = da.coords[y][::-1] if yres < 0 else da.coords[y]
        data = (xs, ys)

    for b in range(bands):
        values = da[b].values
        if nan_nodata and da.attrs.get('nodatavals', []):
            values = values.astype(float)
            for d in da.attrs['nodatavals']:
                values[values == d] = np.NaN
        data += (values,)

    if 'datatype' not in kwargs:
        kwargs['datatype'] = ['xarray', 'grid', 'image']

    if xs.ndim > 1:
        from .element.geo import QuadMesh, HvQuadMesh
        el = QuadMesh if 'crs' in kwargs else HvQuadMesh
        el = el(data, [x, y], **kwargs)
    elif bands < 3:
        from .element.geo import Image, HvImage
        el = Image if 'crs' in kwargs else HvImage
        el = el(data, [x, y], **kwargs)
    else:
        from .element.geo import RGB, HvRGB
        el = RGB if 'crs' in kwargs else HvRGB
        vdims = el.vdims[:bands]
        el = el(data, [x, y], vdims, **kwargs)

    if hasattr(el.data, 'attrs'):
        el.data.attrs = da.attrs
    return el
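# --- Hedged usage sketch (not from the source) -----------------------------
# How from_xarray is typically driven; the file paths are illustrative and
# xr.open_rasterio is assumed available (it was deprecated in favour of
# rioxarray in later xarray releases).
import xarray as xr

da = xr.open_rasterio('elevation.tif')
img = from_xarray(da, nan_nodata=True)  # single band -> Image (geographic if crs decodes)
rgb = from_xarray(xr.open_rasterio('truecolor.tif'))  # 3+ bands -> RGB element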
def dissolve_polygon(input_raster, output_file, dir):
    # os.chdir(dir)
    ds = gdal.Open(input_raster)
    print(ds)
    print(input_raster)

    # define the driver before using it to remove any existing output
    drv = ogr.GetDriverByName("ESRI Shapefile")
    if os.path.exists(output_file):
        drv.DeleteDataSource(output_file)

    grid_code = []
    band = ds.GetRasterBand(1)
    myarray = ds.GetRasterBand(1).ReadAsArray()
    # print(myarray)
    T0 = Affine.from_gdal(*ds.GetGeoTransform())

    # rasterio.features.shapes expects an Affine transform; T0 already is one
    for shp, val in rasterio.features.shapes(myarray, transform=T0):
        # print('%s: %s' % (val, shape(shp)))
        if val >= 0:
            grid_code.append(float(val))
    # add_filed_existing_shapefile('temp1.shp', np.asarray(grid_code))

    with fiona.open('temp.shp', 'r') as source:
        # Copy the source schema and add a new property.
        sink_schema = source.schema.copy()
        sink_schema['properties']['ID'] = 'float'
        # Create a sink for processed features with the same format and
        # coordinate reference system as the source.
        with fiona.open('temp2.shp', 'w', crs=source.crs,
                        driver=source.driver, schema=sink_schema) as sink:
            i = 0
            for f in source:
                # Attach the raster value collected above to each feature.
                f['properties'].update(ID=grid_code[i])
                i += 1
                sink.write(f)

    with fiona.open('temp2.shp') as source:
        meta = source.meta
        print('srt')
        with fiona.open('final.shp', 'w', **meta) as output:
            # groupby clusters consecutive elements of an iterable which have
            # the same key, so you must first sort the features by the 'ID' field
            e = sorted(source, key=lambda k: k['properties']['ID'])
            print(e)
            # group by the 'ID' field
            for key, group in itertools.groupby(e, key=lambda x: x['properties']['ID']):
                properties, geom = zip(*[(feature['properties'], shape(feature['geometry']))
                                         for feature in group])
                # write the feature, computing the unary_union of the elements
                # in the group with the properties of the first element in the group
                output.write({'geometry': mapping(unary_union(geom)),
                              'properties': properties[0]})
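# --- Hedged example (not from the source) ---------------------------------
# Minimal in-memory version of the dissolve pattern above, without the
# intermediate shapefiles; geometries and ID values are illustrative.
import itertools
from shapely.geometry import box, mapping, shape
from shapely.ops import unary_union

feats = [
    {'geometry': mapping(box(0, 0, 1, 1)), 'properties': {'ID': 1.0}},
    {'geometry': mapping(box(1, 0, 2, 1)), 'properties': {'ID': 1.0}},
    {'geometry': mapping(box(5, 5, 6, 6)), 'properties': {'ID': 2.0}},
]
# groupby only merges *consecutive* items with equal keys, hence the sort
feats.sort(key=lambda f: f['properties']['ID'])
for key, group in itertools.groupby(feats, key=lambda f: f['properties']['ID']):
    geoms = [shape(f['geometry']) for f in group]
    print(key, unary_union(geoms).wkt)  # ID 1.0 dissolves to one 2x1 polygon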