def get_slr(self):
    """Extract SLR for any geometries that overlap bounds where SLR is available

    Returns
    -------
    dict
        {"slr_acres": <acres>, "slr": [<slr_0ft>, <slr_1ft>, ..., <slr_6ft>]}
    """
    slr_bounds = gp.read_feather(slr_bounds_filename).geometry.values.data[0]
    ix = pg.intersects(self.geometry, slr_bounds)

    if not ix.sum():
        # No overlap
        return None

    # only extract SLR where there are overlaps
    slr_results = extract_slr_by_geometry(
        self.shapes[ix], bounds=pg.total_bounds(self.geometry[ix])
    )
    # None only if no shape mask
    if slr_results is None:
        return None

    slr = [slr_results[i] for i in range(7)]

    return {"slr_acres": slr_results["shape_mask"], "slr": slr}

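# Hedged sketch of consuming get_slr()'s return value; `report` is a
# hypothetical instance of the class that defines get_slr(), and the assumption
# that each depth bucket is reported in acres follows the docstring above.
results = report.get_slr()
if results is not None:
    print(f"{results['slr_acres']:.1f} acres analyzed")
    for depth, acres in enumerate(results["slr"]):
        print(f"{depth} ft sea-level rise: {acres:.1f} acres")
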
def _valid_hull(geoms, points):
    """Sanity check within ``alpha_shape_auto()`` to verify the generated alpha
    shape actually contains the original set of points (xys).

    Parameters
    ----------
    geoms : GeoSeries
        See alpha_geoms()
    points : list
        xys parameter cast as shapely.geometry.Point objects

    Returns
    -------
    flag : bool
        Valid hull for alpha shape [True] or not [False]
    """
    # if there is not exactly one polygon
    if geoms.shape[0] != 1:
        return False
    # if any (xys) points do not intersect the polygon
    if HAS_PYGEOS:
        return pygeos.intersects(pygeos.from_shapely(geoms[0]), points).all()
    else:
        for point in points:
            if not point.intersects(geoms[0]):
                return False
        return True

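# A minimal sketch of _valid_hull on toy inputs; HAS_PYGEOS is forced to False
# here so the pure-shapely branch runs, and the inputs mirror how
# alpha_shape_auto() calls this helper.
import geopandas as gpd
from shapely.geometry import Point, Polygon

HAS_PYGEOS = False  # force the shapely fallback for this sketch
hull = gpd.GeoSeries([Polygon([(0, 0), (4, 0), (4, 4), (0, 4)])])

print(_valid_hull(hull, [Point(1, 1), Point(2, 3)]))  # True: all points inside
print(_valid_hull(hull, [Point(1, 1), Point(9, 9)]))  # False: one point outside
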
def _pandas(cls, column, **kwargs):
    shape = kwargs.get("shape")
    shape_format = kwargs.get("shape_format")
    column_shape_format = kwargs.get("column_shape_format")

    # Check that the reference shape is given and in a supported format
    if shape is not None:
        try:
            if shape_format == "wkt":
                shape_ref = geos.from_wkt(shape)
            elif shape_format == "wkb":
                shape_ref = geos.from_wkb(shape)
            elif shape_format == "geojson":
                shape_ref = geos.from_geojson(shape)
            else:
                raise NotImplementedError(
                    "Shape constructor method not implemented. Must be in WKT, WKB, or GeoJSON format."
                )
        except Exception:
            raise Exception("A valid reference shape was not given.")
    else:
        raise Exception("A shape must be provided for this method.")

    # Load the column into a pygeos Geometry vector from a numpy array
    # (Series not supported).
    if column_shape_format == "wkt":
        shape_test = geos.from_wkt(column.to_numpy(), on_invalid="ignore")
    elif column_shape_format == "wkb":
        shape_test = geos.from_wkb(column.to_numpy(), on_invalid="ignore")
    else:
        raise NotImplementedError("Column values shape format not implemented.")

    # Allow an array of reference shapes to be provided; union them into a
    # single Polygon or MultiPolygon
    shape_ref = geos.union_all(shape_ref)

    # Prepare the geometries to speed up repeated intersection checks
    geos.prepare(shape_ref)
    geos.prepare(shape_test)

    return pd.Series(geos.intersects(shape_ref, shape_test))

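# Hypothetical standalone call of the check above, assuming pygeos is imported
# as `geos` (as in the snippet) and the column holds WKT strings; `cls` is
# unused, so None is passed here purely for illustration.
import pandas as pd

column = pd.Series([
    "POINT (0.5 0.5)",
    "POINT (5 5)",
    "LINESTRING (0 0, 2 2)",
])
result = _pandas(
    None,
    column,
    shape="POLYGON ((0 0, 1 0, 1 1, 0 1, 0 0))",
    shape_format="wkt",
    column_shape_format="wkt",
)
print(result)  # True, False, True
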
def get_results(self):
    sa_bnd = gp.read_feather(boundary_filename)

    # if area of interest does not intersect SA boundary, there will be no results
    if not pg.intersects(self.geometry, sa_bnd.geometry.values.data).max():
        return None

    results = {
        "type": "",
        "acres": pg.area(self.geometry).sum() * M2_ACRES,
        "name": self.name,
    }

    blueprint_results = self.get_blueprint()
    if blueprint_results is None:
        return None
    results.update(blueprint_results)

    urban_results = self.get_urban()
    if urban_results is not None:
        results.update(urban_results)

    slr_results = self.get_slr()
    if slr_results is not None:
        results.update(slr_results)

    ownership_results = self.get_ownership()
    if ownership_results is not None:
        results.update(ownership_results)

    county_results = self.get_counties()
    if county_results is not None:
        results.update(county_results)

    parca_results = self.get_parca()
    if parca_results is not None:
        results.update(parca_results)

    return results

# Now can just reduce dams back to these lineIDs
dams = (
    dams[["damID", "geometry"]]
    .join(downstreams, on="damID", how="inner")
    .drop_duplicates(subset=["damID", "lineID"])
    .join(flowlines.geometry.rename("flowline"), on="lineID")
    .reset_index(drop=True)
)
print(f"Found {len(dams):,} joins between NHD dams and flowlines")

### Extract representative point
# Look at either end of overlapping line and use that as representative point.
# Otherwise intersect and extract first coordinate of overlapping line
last_pt = pg.get_point(dams.flowline.values.data, -1)
ix = pg.intersects(dams.geometry.values.data, last_pt)
dams.loc[ix, "pt"] = last_pt[ix]

# override with upstream-most point when both intersect
first_pt = pg.get_point(dams.flowline.values.data, 0)
ix = pg.intersects(dams.geometry.values.data, first_pt)
dams.loc[ix, "pt"] = first_pt[ix]

ix = dams.pt.isnull()
# WARNING: this might fail for odd intersection geoms; we always take the first line
# below
pt = pd.Series(
    pg.get_point(
        pg.get_geometry(
            pg.intersection(
                dams.loc[ix].geometry.values.data,
                dams.loc[ix].flowline.values.data,
            ),
            0,  # first line of the (possibly multi-part) intersection
        ),
        0,  # first coordinate of that line
    ),
    index=dams.loc[ix].index,
).dropna()
dams.loc[pt.index, "pt"] = pt

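# Toy check of the endpoint logic above, assuming pygeos imported as pg: keep
# the flowline's last point only where it intersects the dam geometry, then
# override with the first (upstream-most) point when both ends intersect.
import numpy as np
import pygeos as pg

dams_geom = np.array([pg.box(0, 0, 2, 2)])
flowline = np.array([pg.linestrings([(1, 1), (5, 1)])])

last_pt = pg.get_point(flowline, -1)
print(pg.intersects(dams_geom, last_pt))   # [False]: downstream end is outside

first_pt = pg.get_point(flowline, 0)
print(pg.intersects(dams_geom, first_pt))  # [True]: upstream end is inside
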
def find_dam_face_from_waterbody(waterbody, drain_pt):
    total_area = pg.area(waterbody)
    ring = pg.get_exterior_ring(pg.normalize(waterbody))
    total_length = pg.length(ring)
    num_pts = pg.get_num_points(ring) - 1  # drop closing coordinate
    vertices = pg.get_point(ring, range(num_pts))

    ### Extract line segments that use no more than 1/3 of the polygon's
    # coordinates, starting from the vertex nearest the drain
    # note: lower numbers are to the right
    tree = pg.STRtree(vertices)
    ix = tree.nearest(drain_pt)[1][0]
    side_width = min(num_pts // 3, MAX_SIDE_PTS)
    left_ix = ix + side_width
    right_ix = ix - side_width

    # extract these as a left-to-right line
    pts = vertices[max(right_ix, 0):min(num_pts, left_ix)][::-1]
    if left_ix >= num_pts:
        pts = np.append(vertices[0:left_ix - num_pts][::-1], pts)

    if right_ix < 0:
        pts = np.append(pts, vertices[num_pts + right_ix:num_pts][::-1])

    coords = pg.get_coordinates(pts)

    if len(coords) > 2:
        # first run a simplification process to extract the major shape and bends
        # then run the straight line algorithm
        simp_coords, simp_ix = simplify_vw(
            coords, min(MAX_SIMPLIFY_AREA, total_area / 100)
        )

        if len(simp_coords) > 2:
            keep_coords, ix = extract_straight_segments(
                simp_coords, max_angle=MAX_STRAIGHT_ANGLE, loops=5
            )
            keep_ix = simp_ix.take(ix)
        else:
            keep_coords = simp_coords
            keep_ix = simp_ix
    else:
        keep_coords = coords
        keep_ix = np.arange(len(coords))

    ### Calculate the length of each run and drop any that are not sufficiently long
    lengths = segment_length(keep_coords)
    ix = (lengths >= MIN_DAM_WIDTH) & (lengths / total_length < MAX_WIDTH_RATIO)

    pairs = np.dstack([keep_ix[:-1][ix], keep_ix[1:][ix]])[0]

    # since ranges are ragged, we have to do this in a loop instead of vectorized
    segments = []
    for start, end in pairs:
        segments.append(pg.linestrings(coords[start:end + 1]))

    segments = np.array(segments)

    # only keep the segments that are close to the drain
    segments = segments[
        pg.intersects(segments, pg.buffer(drain_pt, MAX_DRAIN_DIST))
    ]

    if not len(segments):
        return segments

    # only keep those where the drain is interior to the line
    pos = pg.line_locate_point(segments, drain_pt)
    lengths = pg.length(segments)

    ix = (pos >= MIN_INTERIOR_DIST) & (pos <= (lengths - MIN_INTERIOR_DIST))

    return segments[ix]

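# A small sketch of the first step above: pull the exterior-ring vertices of a
# toy waterbody and find the one nearest the drain point with an STRtree
# (assumes pygeos imported as pg; the shapes are stand-ins).
import pygeos as pg

waterbody = pg.box(0, 0, 10, 6)
drain_pt = pg.points(10, 3)

ring = pg.get_exterior_ring(pg.normalize(waterbody))
num_pts = pg.get_num_points(ring) - 1  # drop the closing coordinate
vertices = pg.get_point(ring, range(num_pts))

tree = pg.STRtree(vertices)
nearest_ix = tree.nearest(drain_pt)[1][0]
print(nearest_ix, vertices[nearest_ix])
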
def create_mosaic(
    dataset_list: Tuple,
    minzoom: int = None,
    maxzoom: int = None,
    max_threads: int = 20,
    minimum_tile_cover: float = None,
    tile_cover_sort: bool = False,
    version: str = "0.0.2",
    quiet: bool = True,
) -> Dict:
    """Create mosaic definition content.

    Parameters
    ----------
    dataset_list : tuple or list, required
        Dataset urls.
    minzoom : int, optional
        Force mosaic min-zoom.
    maxzoom : int, optional
        Force mosaic max-zoom.
    minimum_tile_cover : float, optional (default: 0)
        Filter files with low tile intersection coverage.
    tile_cover_sort : bool, optional (default: False)
        Sort intersecting files by coverage.
    max_threads : int
        Max threads to use (default: 20).
    version : str, optional
        mosaicJSON definition version.
    quiet : bool, optional (default: True)
        Mask processing steps.

    Returns
    -------
    mosaic_definition : dict
        Mosaic definition.
    """
    if version not in ["0.0.1", "0.0.2"]:
        raise Exception(f"Invalid mosaicJSON version: {version}")

    if not quiet:
        click.echo("Get files footprint", err=True)
    results = get_footprints(dataset_list, max_threads=max_threads, quiet=quiet)

    if minzoom is None:
        minzoom = list(set([feat["properties"]["minzoom"] for feat in results]))
        if len(minzoom) > 1:
            warnings.warn(
                "Multiple minzoom values found; assets have different minzoom values",
                UserWarning,
            )
        minzoom = max(minzoom)

    if maxzoom is None:
        maxzoom = list(set([feat["properties"]["maxzoom"] for feat in results]))
        if len(maxzoom) > 1:
            warnings.warn(
                "Multiple maxzoom values found; assets have different resolution values",
                UserWarning,
            )
        maxzoom = max(maxzoom)

    quadkey_zoom = minzoom

    datatype = list(set([feat["properties"]["datatype"] for feat in results]))
    if len(datatype) > 1:
        raise Exception("Dataset should have the same data type")

    if not quiet:
        click.echo(f"Get quadkey list for zoom: {quadkey_zoom}", err=True)

    tiles = burntiles.burn(results, quadkey_zoom)
    tiles = ["{2}-{0}-{1}".format(*tile.tolist()) for tile in tiles]

    bounds = burntiles.find_extrema(results)
    mosaic_definition = dict(
        mosaicjson=version,
        minzoom=minzoom,
        maxzoom=maxzoom,
        bounds=bounds,
        center=[(bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2, minzoom],
        tiles={},
        version="1.0.0",
    )
    if version == "0.0.2":
        mosaic_definition.update(dict(quadkey_zoom=quadkey_zoom))

    if not quiet:
        click.echo("Feed quadkey index", err=True)

    dataset_geoms = polygons(
        [feat["geometry"]["coordinates"][0] for feat in results]
    )
    dataset = [
        {"path": f["properties"]["path"], "geometry": geom}
        for (f, geom) in zip(results, dataset_geoms)
    ]

    for parent in tiles:
        z, x, y = list(map(int, parent.split("-")))
        parent = mercantile.Tile(x=x, y=y, z=z)
        quad = mercantile.quadkey(*parent)
        tile_geometry = polygons(
            mercantile.feature(parent)["geometry"]["coordinates"][0]
        )
        fdataset = [
            dataset[idx]
            for idx in numpy.nonzero(intersects(tile_geometry, dataset_geoms))[0]
        ]
        if minimum_tile_cover is not None or tile_cover_sort:
            fdataset = _filter_and_sort(
                tile_geometry,
                fdataset,
                minimum_cover=minimum_tile_cover,
                sort_cover=tile_cover_sort,
            )
        if len(fdataset):
            mosaic_definition["tiles"][quad] = [f["path"] for f in fdataset]

    return mosaic_definition

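# Hypothetical invocation of create_mosaic; the COG urls are placeholders and
# get_footprints is assumed to be able to read them.
mosaic = create_mosaic(
    ["https://example.com/cog_1.tif", "https://example.com/cog_2.tif"],
    version="0.0.2",
    quiet=False,
)
print(mosaic["minzoom"], mosaic["maxzoom"], len(mosaic["tiles"]))
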
downstreams = (
    lines_by_dam.apply(find_downstreams)
    .reset_index()
    .explode("lineID")
    .drop_duplicates()
    .set_index("id")
    .lineID
)

# Now can just reduce dams back to these lineIDs
dams = (
    dams[["id", "GNIS_Name", "geometry"]]
    .join(downstreams, on="id", how="inner")
    .drop_duplicates(subset=["id", "lineID"])
    .join(flowlines.geometry.rename("line"), on="lineID")
    .reset_index(drop=True)
)
print("Found {:,} joins between NHD dams and flowlines".format(len(dams)))

### Extract representative point
# Look at either end of overlapping line and use that as representative point.
# Otherwise intersect and extract first coordinate of overlapping line
first = pg.get_point(dams.line, 0)
intersects_first = pg.intersects(dams.geometry, first)
ix = intersects_first
dams.loc[ix, "pt"] = first.loc[ix]

ix = ~intersects_first
last = pg.get_point(dams.loc[ix].line, -1)
intersects_last = pg.intersects(dams.loc[ix].geometry, last)
last = last.loc[intersects_last]
dams.loc[last.index, "pt"] = last

ix = dams.pt.isnull()
# WARNING: this might fail for odd intersection geoms
pt = pg.get_point(pg.intersection(dams.loc[ix].geometry, dams.loc[ix].line), 0).dropna()
dams.loc[pt.index, "pt"] = pt

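# Toy illustration of the fallback above, assuming pygeos imported as pg: when
# neither endpoint of the line touches the dam, intersect the two and take the
# first coordinate of the overlap as the representative point.
import pygeos as pg

dam = pg.box(2, 0, 4, 2)
line = pg.linestrings([(0, 1), (6, 1)])  # crosses the dam, endpoints outside

overlap = pg.intersection(dam, line)
print(pg.get_point(overlap, 0))  # first coordinate of the overlapping segment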