Example #1
def make_polygon(things, tolerance=None):
    """
    Constructs a rough-estimate polygon from a list of CGM elements such as
    POLYLINE.

    This is a rough estimate because CIRCULAR_ARC_POINT elements are not
    modeled exactly but instead the start and end points of the arc are used
    to create a straight line.
    """
    if is_area(things):
        return Polygon(things)
    else:
        # Assume this is multiple elements parsed from a CGM file
        lines = []
        for cmd, data in things:
            if cmd == 'POLYLINE':
                lines.append(LineString(data))
            elif cmd == 'CIRCULAR_ARC_POINT':
                lines.append(LineString([data[0], data[2]]))

        collection = GeometryCollection(lines)

        # If tolerance is provided create a grid of vertices to snap the lines
        # into
        if tolerance:
            min_x, min_y, max_x, max_y = collection.bounds

            # round the min values down and max values up
            min_x = int(min_x - (min_x % tolerance))
            min_y = int(min_y - (min_y % tolerance))
            max_x = int(max_x + (tolerance - (max_x % tolerance)))
            max_y = int(max_y + (tolerance - (max_y % tolerance)))

            x_range = range(min_x, max_x + 1, tolerance)
            y_range = range(min_y, max_y + 1, tolerance)
            grid_coords = [(i, j) for j in x_range for i in y_range]
            snap_grid = Polygon(grid_coords)

            snapped_collection = snap(collection, snap_grid, tolerance)
            stuff = polygonize_full(snapped_collection)
        else:
            stuff = polygonize_full(collection)

        polygons = []
        x_list, y_list = [], []  # collect coordinates of any leftover line work
        for collection in stuff:
            print(collection)
            if collection and isinstance(collection[0], Polygon):
                for poly in collection:
                    plt.plot(*poly.exterior.xy)
                    polygons.append(poly)
            elif collection and tolerance is not None:
                #new_poly = _normalize_lines(collection, tolerance)
                #plt.plot(*new_poly.exterior.xy)
                #polygons.append(new_poly)
                for line in collection:
                    x_list.extend(line.xy[0])
                    y_list.extend(line.xy[1])

        plt.show()
        return polygons
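For reference, shapely.ops.polygonize_full always returns a 4-tuple of GeometryCollections (polygons, dangles, cut edges, invalid rings); the loop over stuff above iterates that tuple generically. A minimal, self-contained sketch of unpacking it directly, with made-up input lines:

# Minimal sketch: unpack the 4-tuple returned by polygonize_full.
from shapely.geometry import LineString
from shapely.ops import polygonize_full

lines = [
    LineString([(0, 0), (1, 0)]),
    LineString([(1, 0), (1, 1)]),
    LineString([(1, 1), (0, 0)]),   # closes a triangle
    LineString([(1, 1), (2, 2)]),   # dangling edge
]
polygons, dangles, cuts, invalids = polygonize_full(lines)
print(len(polygons.geoms))   # 1 polygon (the triangle)
print(len(dangles.geoms))    # 1 dangle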
Example #2
def along_within_parks(edges_gdf, barriers_gdf):
    """
    The function assigns to each street segment in a GeoDataFrame the list of barrierIDs corresponding to parks which lie along the street segment.
    Street segments lying within parks are also considered, and their barriers are included.
        
    Parameters
    ----------
    edges_gdf: LineString GeoDataFrame
        the street segments GeoDataFrame 
    barriers_gdf: LineString GeoDataFrame
        the barriers GeoDataFrame
        
    Returns
    -------
    edges_gdf: LineString GeoDataFrame
        the updated street segments GeoDataFrame
    """
    
    sindex = edges_gdf.sindex
    tmp = barriers_gdf[barriers_gdf['type']=='park']
    edges_gdf['a_parks'] = edges_gdf.apply(lambda row: barriers_along(row['edgeID'], edges_gdf, tmp, sindex, offset = 200), axis = 1)
    
    # polygonize parks
    park_polygons = barriers_gdf[barriers_gdf['type']=='park'].copy()
    park_polygons['geometry'] = park_polygons.apply(lambda row: (polygonize_full(row['geometry']))[0][0], axis = 1)
    park_polygons = gpd.GeoDataFrame(park_polygons['barrierID'], geometry = park_polygons['geometry'], crs = edges_gdf.crs)
    
    edges_gdf['w_parks'] = edges_gdf.apply(lambda row: _within_parks(row['geometry'], park_polygons), axis = 1) #within
    edges_gdf['aw_parks'] = edges_gdf.apply(lambda row: list(set(row['a_parks']+row['w_parks'])), axis = 1) #along
    edges_gdf.drop(['a_parks', 'w_parks'], axis = 1, inplace = True)

    return edges_gdf
Example #3
def correctPolygonIfInvalid(polygon, bufferRadius):
    '''
    Correct the specified polygon if it is invalid in the following ways: it is
    self-crossing, self-intersecting, and/or has edges running on top of one
    another. Correction is performed by breaking it into its valid subpolygons, then
    unioning these with any points of intersection (buffered to the specified radius)
    as well as any edges (again buffered to the specified radius) that lie atop one
    another in the original invalid polygon.
    @param polygon: Polygon to be corrected, if any correction is needed.
    @param bufferRadius: Radius to be applied when buffering any points of
    intersection or edges that run on top of one another.
    @return: Corrected polygon, or original polygon if no correction is needed.
    '''
    
    # Do no correcting if the polygon is already valid.
    if polygon.is_valid:
        return polygon
    
    # Take the exterior of the invalid polygon, intersect it with itself,
    # and then polygonize the resulting multi-line strings. This in turn
    # provides a list of valid sub-polygons and a list of "dangles", line
    # strings that represent edges of the original invalid polygon that
    # ran along on top of one another.
    #
    # Note that this part of the algorithm is taken from:
    #
    #     https://gis.stackexchange.com/questions/243144/bowtie-or-hourglass-polygon-validity-issue-when-self-crossing-point-is-not-defin
    #
    # but differs in that it does not use polygonize(), which would only
    # yield the resulting sub-polygons, but instead needs the dangles as
    # well.
    exterior = polygon.exterior
    multiLineStrings = exterior.intersection(exterior)
    subPolygons, dangles, cuts, invalids = polygonize_full(multiLineStrings)
    subPolygons = list(subPolygons)
    
    # If there is more than one sub-polygon, compare all sub-polygons'
    # vertices with one another to find any intersection points, and for
    # any such intersections, record them. Then turn each into a polygon
    # using the provided buffer radius, and add it to the list of sub-
    # polygons.
    if len(subPolygons) > 1:
        intersectionCoords = set()
        subPolygonsCoords = [set(subPolygon.exterior.coords) for subPolygon in subPolygons]
        for index1 in range(0, len(subPolygons)):
            for index2 in range(index1 + 1, len(subPolygons)):
                intersectionCoords = intersectionCoords.union(subPolygonsCoords[index1].intersection(subPolygonsCoords[index2]))
        for coord in intersectionCoords:
            subPolygons.append(Point(coord).buffer(bufferRadius, 2))
    
    # Turn any edges that ran on top of one another in the original
    # invalid polygon into polygons by buffering them, and add them to
    # the list of sub-polygons.
    for dangle in list(dangles):
        subPolygons.append(dangle.buffer(bufferRadius, 2))
    
    # Finally, take the union of all the sub-polygons.
    return cascaded_union(subPolygons)
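The repair technique described in the comments above can be shown on a simple bowtie polygon; a minimal sketch with made-up coordinates, using unary_union as the modern equivalent of cascaded_union:

# Minimal sketch of the bowtie repair: self-intersect the exterior,
# polygonize, then union the resulting valid sub-polygons.
from shapely.geometry import Polygon
from shapely.ops import polygonize_full, unary_union

bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2), (0, 0)])   # self-crossing
print(bowtie.is_valid)                                        # False

exterior = bowtie.exterior
pieces, dangles, cuts, invalids = polygonize_full(exterior.intersection(exterior))
repaired = unary_union(list(pieces.geoms))                    # two valid triangles
print(repaired.is_valid)                                      # True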
Example #4
def build_geometries(diff, srid):
    srid_txt = "EPSG:{}".format(srid)
    geom = {'line': [], 'point': [], 'polygon': []}
    for osm_id, node in diff['add']['node'].iteritems():
        x, y = projections.from4326(node["geom"], srid_txt)
        geom["point"].append({"geom": "SRID={};POINT({} {})".format(srid, x, y), "osm_id": osm_id, "tags": node["tags"]})
    for osm_id, way in diff['add']['way'].iteritems():
        line = projections.from4326(way["geom"], srid_txt)
        polygonize = check_tags_if_polygon(way["tags"])
        if polygonize:
            result, dangles, cuts, invalids = polygonize_full([line])
            #print result, dangles, cuts, invalids
            for poly in result:
                geom["polygon"].append({"geom": "SRID={};{}".format(srid, poly.wkt), "osm_id": osm_id, "way_area":poly.area, "tags": way["tags"]})
            for line in dangles:
                geom["line"].append({"geom": "SRID={};{}".format(srid, line.wkt), "osm_id": osm_id, "tags": way["tags"]})
            for line in cuts:
                geom["line"].append({"geom": "SRID={};{}".format(srid, line.wkt), "osm_id": osm_id, "tags": way["tags"]})
            for line in invalids:
                geom["line"].append({"geom": "SRID={};{}".format(srid, line.wkt), "osm_id": osm_id, "tags": way["tags"]})
        else:
            ll = ",".join([a[0] + a[1] for a in line])
            geom["line"].append({"geom": "SRID={};LINESTRING({})".format(srid, ll), "osm_id": osm_id, "tags": way["tags"]})
    for osm_id, relation in diff['add']['relation'].iteritems():
        lines = [projections.from4326(way, srid_txt) for way in relation["geom"]]
        polygonize = check_tags_if_polygon(relation["tags"])
        if polygonize:
            merged = linemerge(lines)
            polys = []
            lines = []
            for line in merged:
                if line.is_ring:
                    polys.append(line)
                else:
                    lines.append(line)
            if polys:
                # TODO: repair geometry
                poly = Polygon(polys[0], polys[1:])
                geom["polygon"].append({"geom": "SRID={};{}".format(srid, poly.wkt), "osm_id": -osm_id, "way_area":0, "tags": relation["tags"]})
            for line in lines:
                geom["line"].append({"geom": "SRID={};{}".format(srid, line.wkt), "osm_id": -osm_id, "tags": relation["tags"]})

            #result, dangles, cuts, invalids = polygonize_full(lines)
            ##print result, dangles, cuts, invalids
            #result = list(result)
            #sd = result[0]
            #for poly in result[1:]:
                #sd = sd.symmetric_difference(poly)
            #geom["polygon"].append({"geom": "SRID={};{}".format(srid, sd.wkt), "osm_id": -osm_id, "way_area":sd.area, "tags": relation["tags"]})
            #for line in dangles:
                #geom["line"].append({"geom": "SRID={};{}".format(srid, line.wkt), "osm_id": -osm_id, "tags": relation["tags"]})
            #for line in cuts:
                #geom["line"].append({"geom": "SRID={};{}".format(srid, line.wkt), "osm_id": -osm_id, "tags": relation["tags"]})

        else:
            ll = ",".join([a[0] + a[1] for a in line])
            geom["line"].append({"geom": "SRID={};LINESTRING({})".format(srid, ll), "osm_id": osm_id, "tags": relation["tags"]})
    return geom
Example #5
    def getArea(self):
        """Calculate glacier area (in km2) relative to reference box."""
        outline = self.terminus.union(self.referencebox)
        glacierpoly = ops.polygonize_full(outline)[0]
        if glacierpoly.is_empty:
            print('{}: Glacier {} trace and box do not overlap'.format(
                self.date, self.gid))
        area_km = glacierpoly.area / 10**6
        return area_km
Example #6
    def cleanup(self) -> str:
        if not self.geometry:
            return 'no cleanup since no geometry. have you run process yet?'

        multiline = sg.MultiLineString(self.geometry)
        #merge = ops.linemerge(multiline)

        result, dangles, cuts, invalids = ops.polygonize_full(self.geometry)
        self.polygons = list(result)
        return 'done'
Example #7
def getGlacierArea(terminus, box):
    """Get area of polygon created by intersection of glacier terminus and
    reference box. Default area for EPSG:3574 is m2; return in km2."""
    outline = terminus.geometry.union(box.geometry)
    poly = polygonize_full(outline)[0]
    if poly.is_empty:
        print("%s: Glacier %s trace and box do not overlap" %
              (terminus.Date, terminus.GlacierID))
    area_km = poly.area / 10**6
    return area_km
Example #8
def park_barriers(place, download_method, distance = 500.0, epsg = None, min_area = 100000):
    """
    The function downloads park areas of at least a certain extent and converts them to LineString features. Parks may break continuity in the urban structure, besides being attractive areas for pedestrians.
        
    Parameters
    ----------
    place: string
        name of cities or areas in OSM: when using "OSMpolygon" please provide the name of a "relation" in OSM as an argument of "place"; when using "distance_from_address"
        provide an existing OSM address; when using "OSMplace" provide an OSM place name
    download_method: string {"polygon", "distance_from_address", "OSMplace"}
        it indicates the method that should be used for downloading the data.
    distance: float
        it is used only if download_method == "distance_from_address"
    epsg: int
        epsg of the area considered; if None OSMNx is used for the projection
    min_area: double
        parks with an extent smaller than this parameter are disregarded
      
    Returns
    -------
    park_barriers: LineString GeoDataFrame
        the park barriers GeoDataFrame
    """

    crs = 'EPSG:' + str(epsg)
    tags = {"leisure": True}
    parks_poly = _download_geometries(place, download_method, tags, crs, distance)
    
    parks_poly = parks_poly[parks_poly.leisure == 'park']
    parks_poly = parks_poly[~parks_poly['geometry'].is_empty] 
    parks_poly['area'] = parks_poly.geometry.area
    parks_poly = parks_poly[parks_poly.area >= min_area]
 
    pp = parks_poly['geometry'].unary_union  
    pp = polygonize_full(pp)
    parks = unary_union(pp).buffer(10).boundary # to simplify a bit
    parks = _simplify_barrier(parks)

    df = pd.DataFrame({'geometry': parks, 'type': ['park'] * len(parks)})
    park_barriers = gpd.GeoDataFrame(df, geometry = df['geometry'], crs = crs)
    
    return park_barriers
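The lines-to-barrier pipeline used above (union the lines, polygonize_full, union the pieces, buffer, take the boundary) can be exercised on a toy input; a minimal sketch assuming only Shapely, with made-up coordinates:

# Minimal sketch of the barrier pipeline: closed line work becomes a polygon,
# which is padded by 10 units and reduced to its outline.
from shapely.geometry import LineString
from shapely.ops import polygonize_full, unary_union

line_work = unary_union([
    LineString([(0, 0), (100, 0)]),
    LineString([(100, 0), (100, 100)]),
    LineString([(100, 100), (0, 100)]),
    LineString([(0, 100), (0, 0)]),
])
pieces = polygonize_full(line_work)                 # 4-tuple of collections
barrier = unary_union(pieces).buffer(10).boundary   # padded outline
print(barrier.geom_type, round(barrier.length))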
Example #9
def natural_polygons(points_df, polygon=None):
    """ Take a GeoDataFrame with points and return the natural polygons.

        Parameters:

        points_df (GeoDataFrame): points to process into natural polygons.
        polygon (Polygon GeoDataframe): Single polygon that encloses the points
    """
    coords = list(zip(points_df.geometry.x.values,
                      points_df.geometry.y.values))
    TIN = Delaunay(coords)
    # list of coordinates for each edge
    edges = []
    for tr in TIN.simplices:
        for i in range(3):
            edge_idx0 = tr[i]
            edge_idx1 = tr[(i + 1) % 3]
            edges.append(
                LineString((Point(TIN.points[edge_idx0]),
                            Point(TIN.points[edge_idx1]))))

    edges = {'geometry': edges}
    edges_df = gpd.GeoDataFrame(edges)
    edges_df['length'] = edges_df.geometry.length
    head = edges_df[edges_df['length'] < edges_df.mean(axis=0).length]
    head.crs = {'init': 'epsg:4326'}
    if polygon is not None:
        # use only lines within polygon
        head = gpd.sjoin(head, polygon, how='inner', op='within')
    linework = linemerge(head.geometry.values)
    linework = unary_union(linework)
    result, _, _, _ = polygonize_full(linework)
    result = unary_union(result)
    result = {'geometry': result}
    try:
        result_df = gpd.GeoDataFrame(result)
        result_df.crs = {'init': 'epsg:4326'}
    except:
        print(result)
        return None
        # result_df = gpd.GeoDataFrame({'geometry':[]})
    return (head, result_df)
Example #10
def railway_barriers(place, download_method, distance = 500.0, epsg = None, keep_light_rail = False):
    """
    The function downloads overground railway structures from OSM. Such structures can be considered barriers which shape the Image of the City and obstruct sight and movement.
        
    Parameters
    ----------
    place: string
        name of cities or areas in OSM: when using "OSMpolygon" please provide the name of a "relation" in OSM as an argument of "place"; when using "distance_from_address"
        provide an existing OSM address; when using "OSMplace" provide an OSM place name
    download_method: string {"polygon", "distance_from_address", "OSMplace"}
        it indicates the method that should be used for downloading the data.
    distance: float
        it is used only if download_method == "distance_from_address"
    epsg: int
        epsg of the area considered; if None OSMNx is used for the projection
    keep_light_rail: boolean
        if True, light rail (e.g. tramways) is kept
        
    Returns
    -------
    railway_barriers: LineString GeoDataFrame
        the railway barriers GeoDataFrame
    """    
    crs = 'EPSG:' + str(epsg)
    tags = {"railway":"rail"}
    railways = _download_geometries(place, download_method, tags, crs, distance)
    # removing light_rail, in case
    if not keep_light_rail:
        railways = railways[railways.railway != 'light_rail']
    if "tunnel" in railways.columns:
        railways["tunnel"].fillna(0, inplace = True)
        railways = railways[railways["tunnel"] == 0]     
    
    r = railways.unary_union
    p = polygonize_full(r)
    railways = unary_union(p).buffer(10).boundary # to simplify a bit
    railways = _simplify_barrier(railways)
        
    df = pd.DataFrame({'geometry': railways, 'type': ['railway'] * len(railways)})
    railway_barriers = gpd.GeoDataFrame(df, geometry = df['geometry'], crs = crs)
    
    return railway_barriers
Example #11
    def test_polygonize_full(self):

        lines2 = [
            ((0, 0), (1, 1)),
            ((0, 0), (0, 1)),
            ((0, 1), (1, 1)),
            ((1, 1), (1, 0)),
            ((1, 0), (0, 0)),
            ((5, 5), (6, 6)),
            ((1, 1), (100, 100)),
            ]

        result2, dangles, cuts, invalids = polygonize_full(lines2)
        self.assertEqual(len(result2), 2)
        self.assertTrue(all([isinstance(x, Polygon) for x in result2]))
        self.assertEqual(list(dangles.geoms), [])
        self.assertTrue(all([isinstance(x, LineString) for x in cuts.geoms]))

        self.assertEqual(
            dump_coords(cuts),
            [[(1.0, 1.0), (100.0, 100.0)], [(5.0, 5.0), (6.0, 6.0)]])
        self.assertEqual(list(invalids.geoms), [])
Example #12
    def test_polygonize_full(self):

        lines2 = [
            ((0, 0), (1, 1)),
            ((0, 0), (0, 1)),
            ((0, 1), (1, 1)),
            ((1, 1), (1, 0)),
            ((1, 0), (0, 0)),
            ((5, 5), (6, 6)),
            ((1, 1), (100, 100)),
        ]

        result2, dangles, cuts, invalids = polygonize_full(lines2)
        self.assertEqual(len(result2.geoms), 2)
        self.assertTrue(all([isinstance(x, Polygon) for x in result2.geoms]))
        self.assertEqual(list(dangles.geoms), [])
        self.assertTrue(all([isinstance(x, LineString) for x in cuts.geoms]))

        self.assertEqual(dump_coords(cuts),
                         [[(1.0, 1.0), (100.0, 100.0)], [(5.0, 5.0),
                                                         (6.0, 6.0)]])
        self.assertEqual(list(invalids.geoms), [])
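The two tests above differ only in how the returned collections are accessed: older Shapely versions allowed len() and iteration directly on the GeometryCollections, while Shapely 2.0 requires .geoms. Going through .geoms works on both; a minimal sketch:

# Minimal sketch: .geoms is the version-tolerant way to reach the members
# of the collections returned by polygonize_full.
from shapely.ops import polygonize_full

lines = [((0, 0), (0, 1)), ((0, 1), (1, 1)), ((1, 1), (0, 0))]
result, dangles, cuts, invalids = polygonize_full(lines)
print(len(list(result.geoms)))   # 1 polygon (the triangle)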
Example #13
def polygonise_partitions(edges_gdf, column, convex_hull = True, buffer = 30):
    """
    Given districts assigned to street segments, it creates polygons representing the districts, either by creating a convex_hull for each group of segments or
    simply by polygonising them.
    
    Parameters
    ----------
    edges_gdf: LineString GeoDataFrame
        the street segments GeoDataFrame
    column: string
        the name of the column containing the district identifier
    convex_hull: boolean
        if True, creates convex hulls after having polygonised the clusters of segments
    buffer: float
        desired buffer around the polygonised segments, before possibly obtaining the convex hulls
        
    Returns
    -------
    polygonised_partitions: Polygon GeoDataFrame
        a GeoDataFrame containing the polygonised partitions
    """
    
    polygons = []
    partitionIDs = []
    d = {'geometry' : polygons, column : partitionIDs}

    partitions = edges_gdf[column].unique()
    for i in partitions:
        polygon =  polygonize_full(edges_gdf[edges_gdf[column] == i].geometry.unary_union)
        polygon = unary_union(polygon).buffer(buffer)
        if convex_hull:
            polygons.append(polygon.convex_hull)
        else: 
            polygons.append(polygon)
        partitionIDs.append(i)

    df = pd.DataFrame(d)
    polygonised_partitions = gpd.GeoDataFrame(df, crs=edges_gdf.crs, geometry=df['geometry'])
    return polygonised_partitions
Example #14
def main():

    lines = [splineToLineString(sp) for sp in op.GetChildren()]

    result, dangles, cuts, invalids = polygonize_full(lines)

    if not result.is_empty:
        print(result)

    if not dangles.is_empty:
        print(dangles)

    if not cuts.is_empty:
        cuts_null = c4d.BaseObject(c4d.Onull)
        cuts_null.SetName("cuts")
        for line in cuts:
            sp = lineStringToSpline(line)
            sp.InsertUnderLast(cuts_null)
        doc.InsertObject(cuts_null)

    if not invalids.is_empty:
        print(invalids)
    c4d.EventAdd()
Example #15
api = overpy.Overpass()

stateRelation = api.query("rel(165475);(._;>;);out;")
fnwrRelation = api.query("rel(3947664);(._;>;);out;")
motorwaysInBoundingBox = api.query("way(32.120,-125.222,42.212,-113.928)[highway=motorway];(._;>);out;")
trunkInBoundingBox = api.query("way(32.120,-125.222,42.212,-113.928)[highway=trunk];(._;>);out;")
 
stateLineStrings = []
for way in stateRelation.ways:
 lineString = []
 for node in way.nodes:
  lineString.append((node.lon,node.lat))
 stateLineStrings.append(LineString(lineString))

polygons, dangles, cuts, invalids = polygonize_full(stateLineStrings)
statePolygonWithOcean = polygons.geoms[3]

statePolygon = statePolygonWithOcean.intersection(northAmericaPolygon)

fnwrLineStrings = []
for way in fnwrRelation.ways:
 lineString = []
 for node in way.nodes:
  lineString.append((node.lon,node.lat))
 fnwrLineStrings.append(LineString(lineString))

Fpolygons, dangles, cuts, invalids = polygonize_full(fnwrLineStrings)

motorwayLineStrings = []
for way in motorwaysInBoundingBox.ways:
Example #16
def polygonize_shapely_lines(shp, size_threshold = 0):
    
    result, _, _, _ = polygonize_full(shp)
    polys = MultiPolygon([i for i in result if i.area > size_threshold])
    
    return polys
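A hypothetical usage sketch for polygonize_shapely_lines, assuming the Shapely 1.x iteration semantics the function relies on; four made-up segments closing a unit square should yield a single polygon:

# Hypothetical usage of polygonize_shapely_lines (input lines are made up).
from shapely.geometry import LineString

square_edges = [
    LineString([(0, 0), (1, 0)]),
    LineString([(1, 0), (1, 1)]),
    LineString([(1, 1), (0, 1)]),
    LineString([(0, 1), (0, 0)]),
]
polys = polygonize_shapely_lines(square_edges)
print(len(polys.geoms))   # 1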
Example #17
def build_geometries(diff, srid):
    srid_txt = "EPSG:{}".format(srid)
    geom = {'line': [], 'point': [], 'polygon': []}
    for osm_id, node in diff['add']['node'].iteritems():
        x, y = projections.from4326(node["geom"], srid_txt)
        geom["point"].append({
            "geom": "SRID={};POINT({} {})".format(srid, x, y),
            "osm_id": osm_id,
            "tags": node["tags"]
        })
    for osm_id, way in diff['add']['way'].iteritems():
        line = projections.from4326(way["geom"], srid_txt)
        polygonize = check_tags_if_polygon(way["tags"])
        if polygonize:
            result, dangles, cuts, invalids = polygonize_full([line])
            #print result, dangles, cuts, invalids
            for poly in result:
                geom["polygon"].append({
                    "geom":
                    "SRID={};{}".format(srid, poly.wkt),
                    "osm_id":
                    osm_id,
                    "way_area":
                    poly.area,
                    "tags":
                    way["tags"]
                })
            for line in dangles:
                geom["line"].append({
                    "geom": "SRID={};{}".format(srid, line.wkt),
                    "osm_id": osm_id,
                    "tags": way["tags"]
                })
            for line in cuts:
                geom["line"].append({
                    "geom": "SRID={};{}".format(srid, line.wkt),
                    "osm_id": osm_id,
                    "tags": way["tags"]
                })
            for line in invalids:
                geom["line"].append({
                    "geom": "SRID={};{}".format(srid, line.wkt),
                    "osm_id": osm_id,
                    "tags": way["tags"]
                })
        else:
            ll = ",".join([a[0] + a[1] for a in line])
            geom["line"].append({
                "geom":
                "SRID={};LINESTRING({})".format(srid, ll),
                "osm_id":
                osm_id,
                "tags":
                way["tags"]
            })
    for osm_id, relation in diff['add']['relation'].iteritems():
        lines = [
            projections.from4326(way, srid_txt) for way in relation["geom"]
        ]
        polygonize = check_tags_if_polygon(relation["tags"])
        if polygonize:
            merged = linemerge(lines)
            polys = []
            lines = []
            for line in merged:
                if line.is_ring:
                    polys.append(line)
                else:
                    lines.append(line)
            if polys:
                # TODO: repair geometry
                poly = Polygon(polys[0], polys[1:])
                geom["polygon"].append({
                    "geom":
                    "SRID={};{}".format(srid, poly.wkt),
                    "osm_id":
                    -osm_id,
                    "way_area":
                    0,
                    "tags":
                    relation["tags"]
                })
            for line in lines:
                geom["line"].append({
                    "geom": "SRID={};{}".format(srid, line.wkt),
                    "osm_id": -osm_id,
                    "tags": relation["tags"]
                })

            #result, dangles, cuts, invalids = polygonize_full(lines)
            ##print result, dangles, cuts, invalids
            #result = list(result)
            #sd = result[0]
            #for poly in result[1:]:
            #sd = sd.symmetric_difference(poly)
            #geom["polygon"].append({"geom": "SRID={};{}".format(srid, sd.wkt), "osm_id": -osm_id, "way_area":sd.area, "tags": relation["tags"]})
            #for line in dangles:
            #geom["line"].append({"geom": "SRID={};{}".format(srid, line.wkt), "osm_id": -osm_id, "tags": relation["tags"]})
            #for line in cuts:
            #geom["line"].append({"geom": "SRID={};{}".format(srid, line.wkt), "osm_id": -osm_id, "tags": relation["tags"]})

        else:
            ll = ",".join([a[0] + a[1] for a in line])
            geom["line"].append({
                "geom":
                "SRID={};LINESTRING({})".format(srid, ll),
                "osm_id":
                osm_id,
                "tags":
                relation["tags"]
            })
    return geom
Example #18
def park_barriers(place,
                  download_method,
                  distance=None,
                  epsg=None,
                  min_area=100000):
    """
    The function downloads park areas of at least a certain extent and converts them to LineString features. Parks may break continuity in the urban structure, besides being attractive areas for pedestrians.
        
    Parameters
    ----------
    place: string
        name of cities or areas in OSM: when using "OSMpolygon" please provide the name of a "relation" in OSM as an argument of "place"; when using "distance_from_address"
        provide an existing OSM address; when using "OSMplace" provide an OSM place name
    download_method: string, {"polygon", "distance_from_address", "OSMplace"}
        it indicates the method that should be used for downloading the data.
    distance: float
        it is used only if download_method == "distance_from_address"
    epsg: int
        epsg of the area considered; if None OSMNx is used for the projection
    min_area: double
        parks with an extent smaller than this parameter are disregarded
      
    Returns
    -------
    LineString GeoDataFrame
    """

    crs = {'init': 'epsg:' + str(epsg)}
    if download_method == 'distance_from_address':
        parks_polygon = ox.footprints_from_address(place,
                                                   distance=distance,
                                                   footprint_type="leisure",
                                                   retain_invalid=True)
    elif download_method == 'OSMplace':
        parks_polygon = ox.footprints_from_place(place,
                                                 footprint_type="leisure",
                                                 retain_invalid=True)
    else:
        parks_polygon = ox.footprints_from_polygon(place,
                                                   footprint_type="leisure",
                                                   retain_invalid=True)

    parks_polygon = parks_polygon[parks_polygon.leisure == 'park']
    ix_geo = parks_polygon.columns.get_loc("geometry") + 1
    to_drop = []

    for row in parks_polygon.itertuples():
        type_geo = None
        try:
            type_geo = row[ix_geo].geom_type
        except:
            to_drop.append(row.Index)

    parks_polygon.drop(to_drop, axis=0, inplace=True)
    parks_polygon = parks_polygon.to_crs(crs)
    parks_polygon.area = parks_polygon.geometry.area
    parks_polygon = parks_polygon[parks_polygon.area >= min_area]

    pp = parks_polygon['geometry'].unary_union
    pp = polygonize_full(pp)
    parks = unary_union(pp).buffer(10).boundary  # to simplify a bit
    parks = linemerge(parks)
    if parks.type != "LineString":
        features = [i for i in parks]
    else:
        features = [parks]
    features = [i for i in parks]

    df = pd.DataFrame({'geometry': features, 'type': ['park'] * len(features)})
    park_barriers = gpd.GeoDataFrame(df, geometry=df['geometry'], crs=crs)

    return park_barriers
Example #19
def railway_barriers(place,
                     download_method,
                     distance=None,
                     epsg=None,
                     keep_light_rail=False):
    """
    The function downloads overground railway structures from OSM. Such structures can be considered barriers which shape the Image of the City and obstruct sight and movement.
        
    Parameters
    ----------
    place: string
        name of cities or areas in OSM: when using "OSMpolygon" please provide the name of a "relation" in OSM as an argument of "place"; when using "distance_from_address"
        provide an existing OSM address; when using "OSMplace" provide an OSM place name
    download_method: string, {"polygon", "distance_from_address", "OSMplace"}
        it indicates the method that should be used for downloading the data.
    distance: float
        it is used only if download_method == "distance_from_address"
    epsg: int
        epsg of the area considered; if None OSMNx is used for the projection
    keep_light_rail: boolean
        if True, light rail (e.g. tramways) is kept
        
    Returns
    -------
    LineString GeoDataFrame
    """

    crs = {'init': 'epsg:' + str(epsg)}
    if download_method == 'distance_from_address':
        railway_graph = ox.graph_from_address(
            place,
            distance=distance,
            retain_all=True,
            truncate_by_edge=False,
            simplify=False,
            network_type='none',
            infrastructure='way["railway"~"rail"]')
    elif download_method == 'OSMplace':
        railway_graph = ox.graph_from_place(
            place,
            retain_all=True,
            truncate_by_edge=False,
            simplify=False,
            network_type='none',
            infrastructure='way["railway"~"rail"]')
    else:
        railway_graph = ox.graph_from_polygon(
            place,
            retain_all=True,
            truncate_by_edge=False,
            simplify=False,
            network_type='none',
            infrastructure='way["railway"~"rail"]')

    railways = ox.graph_to_gdfs(railway_graph,
                                nodes=False,
                                edges=True,
                                node_geometry=False,
                                fill_edge_geometry=True)
    railways = railways.to_crs(crs)
    if "tunnel" in railways.columns:
        railways["tunnel"].fillna(0, inplace=True)
        railways = railways[railways["tunnel"] == 0]
    r = railways.unary_union

    # removing light_rail, in case
    if not keep_light_rail:
        try:
            if download_method == 'distance_from_address':
                light_graph = ox.graph_from_address(
                    place,
                    distance=distance,
                    retain_all=True,
                    truncate_by_edge=False,
                    simplify=False,
                    network_type='none',
                    infrastructure='way["railway"~"light_rail"]')
            elif download_method == 'OSMplace':
                light_graph = ox.graph_from_place(
                    place,
                    retain_all=True,
                    truncate_by_edge=False,
                    simplify=False,
                    network_type='none',
                    infrastructure='way["railway"~"light_rail"]')
            else:
                light_graph = ox.graph_from_polygon(
                    place,
                    retain_all=True,
                    truncate_by_edge=False,
                    simplify=False,
                    network_type='none',
                    infrastructure='way["railway"~"light_rail"]')

            light_railways = ox.graph_to_gdfs(light_graph,
                                              nodes=False,
                                              edges=True,
                                              node_geometry=False,
                                              fill_edge_geometry=True)
            light_railways = light_railways.to_crs(crs)
            lr = light_railways.unary_union
            r = r.difference(lr)

        except ox.EmptyOverpassResponse as e:
            pass

    p = polygonize_full(r)
    railways = unary_union(p).buffer(10).boundary  # to simplify a bit
    railways = linemerge(railways)
    if railways.type != "LineString":
        features = [i for i in railways]
    else:
        features = [railways]
    df = pd.DataFrame({
        'geometry': features,
        'type': ['railway'] * len(features)
    })
    railway_barriers = gpd.GeoDataFrame(df, geometry=df['geometry'], crs=crs)

    return railway_barriers
Example #20
File: Shapely.py  Project: mctrinh/METIS
# Print line list with points
#print 'Lines with points: ', n_line
#for i in range(n_line):
#    print i+1, ' th lines with points : ', linepoints[i]
#print '\n'

# ==================================================
# Make multilinestring from line list
# ==================================================
multilines = MultiLineString(linepoints)

x = multilines.intersection(multilines)

# Polygonize
result, dangles, cuts, invalids = polygonize_full(x)

result = MultiPolygon(result)
polygon = cascaded_union(result)

# Make mutilinestring from line list
#multilines = MultiLineString(linepoints)

# Polygonize
#result, dangles, cuts, invalids = polygonize_full(multilines)

#result = MultiPolygon(result)
#polygon = cascaded_union(result)

##################################################
multilines = polygon.boundary.union(result.boundary)
Example #21
line = LineString([(0,0),(10,0), (11,0), (11,1), (10,1), (20,0), (20,1), (22,1), (22,0), (30, 0), (30,-5), (21,-5), (21,.5)])
line1 = line.simplify(1, preserve_topology=True)



line = LineString([(0,0),(2,2), (4,-2), (6,2), (7,0), (8,0)])
splitter = LineString([(0,0),(8,0)])

line_a = LineString([(0,0),(10,0)])
line_b = LineString([(10,110),(20,20)])
line_c = LineString([(7,0),(9,0)])

line_split = split(line, splitter)
splitter_split = split(splitter, line)

pol = polygonize_full([line_split, splitter_split])

val_a = line_a.intersects(pol[0])
val_b = line_b.intersects(pol[0])
val_c = line_c.intersects(pol[0])


coords = [(0, 0), (0, 2), (1, 1), (2, 2), (2, 0), (1, 1), (0, 0)]
coords = [(0,0),(5,0), (10,0), (10,10), (5,10), (5,0), (5,-10),(0,-10), (0,0)]
bowtie = Polygon(coords)
va11 =  bowtie.is_valid
clean = bowtie.buffer(0)
val2 = clean.is_valid

l_a = [(0,0),(1,3),(2,-3),(10,10), (0,0)]
pol = Polygon(l_a)
Example #22
File: county-map.py  Project: moohax/py
for way in countyRelation.ways:
 above = 1
 for node in way.nodes:
  if (node.lat < 33.6078):
   above = 0
 if (above):
  countyWaysAboveIslands.append(way)

countyLineStrings = []
for way in countyWaysAboveIslands:
 lineString = []
 for node in way.nodes:
  lineString.append((node.lon,node.lat))
 countyLineStrings.append(LineString(lineString))

polygons, dangles, cuts, invalids = polygonize_full(countyLineStrings)
countyPolygonWithOcean = polygons.geoms[0]

countyPolygon = countyPolygonWithOcean.intersection(coastBox)

highwayLineStrings = []
for way in primaryHighwaysInBoundingBox.ways:
 line = []
 for node in way.nodes:
  line.append((node.lon,node.lat))
 wayLineString = LineString(line)
 if countyPolygon.contains(wayLineString): highwayLineStrings.append(wayLineString)

for way in secondaryHighwaysInBoundingBox.ways:
 line = []
 for node in way.nodes:
Example #23
def count(query_result):
    """
    Count Polygon overlaps.
    Only valid geometries will be included in the process.
    Polygons crossing the dateline will be split along the meridian.

    The method is general enough to be used for any polygonal
    geometry input. As such, depending on the complexity of the
    geometry, and the number of features, this could take a while.

    The generic method is:
        * convert to line features (union then preserves nodes)
        * union all line geometries
        * polygonise unioned features (non-overlapping polygons)
        * create centroids of each non-overlapping polygon
        * spatial join (centroids contained within overlapping polygons)
        * summarise counts per non-overlapping polygon

    Refined smarts such as merging by a region identifier, i.e.
    Landsat Path/Row, or Sentinel-2 MGRS Tile ID, could significantly
    reduce the computational cost, in terms of processing time and
    memory used.
    However, the first cut is to have something generic that works
    across all sensor acquisition footprints.

    :param query_result:
        A GeoJSON dict as returned by `count_overlaps.query`.

    :return:
        A GeoDataFrame containing frequency counts determined by
        overlapping Polygons.
    """
    # temporarily write the result to disk
    with tempfile.TemporaryDirectory() as tmpdir:
        fname = str(Path(tmpdir, 'query.geojson'))
        with open(fname, 'w') as src:
            json.dump(query_result, src, indent=4)

        df = geopandas.read_file(fname)

    # TODO Look into the cause of invalid geometry and fix if required
    # only keep valid geometry
    valid = df[df.is_valid].copy()

    # manual workaround as geopandas returns errors when doing a unary_union
    # split geometries along the dateline meridian
    # convert geometries to line features (simplest is to use exterior),
    # that way the union will preserve the nodes
    exteriors = []
    for _, row in valid.iterrows():
        # split any geometries crossing the dateline
        split = transform_geom(df.crs,
                               df.crs,
                               mapping(row.geometry),
                               antimeridian_cutting=True)

        geom = shape(split)
        if isinstance(geom, MultiPolygon):
            exteriors.extend([g.exterior for g in geom])
        else:
            exteriors.append(geom.exterior)

    # union the line features, and convert to polygons
    union = unary_union(exteriors)
    result, dangles, cuts, invalids = polygonize_full(union)

    # separate the multi-geometry feature into individual features
    explode = [p for p in result]

    # insert into geopandas and create centroids
    non_overlaps = geopandas.GeoDataFrame({'fid': range(len(explode))},
                                          geometry=explode,
                                          crs=df.crs)
    non_overlaps['centroid'] = non_overlaps.centroid
    non_overlaps.set_geometry('centroid', inplace=True)

    # spatial join (centroids within observations)
    sjoin = geopandas.sjoin(non_overlaps, valid, how='left', op='within')

    # overlap count per centroid
    # overlap_count = sjoin.groupby(['fid']).agg(['count'])
    overlap_count = sjoin.groupby(['fid'
                                   ]).size().reset_index(name='observations')
    overlap_count.set_index('fid', inplace=True)

    # table join the centroid overlap count with the non-overlapping geometry
    non_overlaps.set_geometry('geometry', inplace=True)
    tjoin = non_overlaps.join(overlap_count, on='fid')

    # reset the non-overlapping polygons as the geometry source for the dataframe
    tjoin.set_geometry('geometry', inplace=True)
    tjoin.drop('centroid', axis=1, inplace=True)

    return tjoin
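The counting idea described in the docstring above (polygonize the union of the exteriors, then count which inputs contain each piece's centroid) can be shown without GeoPandas; a toy sketch with two made-up overlapping squares:

# Toy sketch of overlap counting: the shared piece of two overlapping
# squares reports a count of 2, the remaining pieces a count of 1.
from shapely.geometry import box
from shapely.ops import polygonize_full, unary_union

footprints = [box(0, 0, 2, 2), box(1, 1, 3, 3)]
pieces, _, _, _ = polygonize_full(unary_union([p.exterior for p in footprints]))
for piece in pieces.geoms:
    count = sum(p.contains(piece.centroid) for p in footprints)
    print(round(piece.area, 1), count)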
Example #24
def dissolve_roundabouts(nodes_gdf,
                         edges_gdf,
                         max_length_segment=80,
                         angle_tolerance=40):
    """
    Parameters
    ----------
    nodes_gdf: Point GeoDataFrame
        the street junctions GeoDataFrame
    edges_gdf: LineString GeoDataFrame
        the street segment GeoDataFrame
    max_length_segment: float
        if a segment in the possible roundabout-like junction is longer than this threshold, the junction examined is not considered a roundabout
    angle_tolerance: float
        if two segments in the possible roundabout-like junction form an angle whose magnitude is higher than this threshold, the junction examined is not
        considered a roundabout
   
    Returns
    -------
    nodes_gdf, edges_gdf: tuple of GeoDataFrames
        the simplified GeoDataFrames
    """

    nodes_gdf.index, edges_gdf.index = nodes_gdf.nodeID, edges_gdf.edgeID
    nodes_gdf.index.name, edges_gdf.index.name = None, None
    nodes_gdf, edges_gdf = nodes_gdf.copy(), edges_gdf.copy()

    ix_geo = edges_gdf.columns.get_loc("geometry") + 1
    ix_u, ix_v = edges_gdf.columns.get_loc("u") + 1, edges_gdf.columns.get_loc(
        "v") + 1

    processed_segments = []
    processed_nodes = []

    # editing the ones which only connect three edges
    to_edit = {k: v for k, v in nodes_degree(edges_gdf).items() if v == 3}
    if len(to_edit) == 0:
        return (nodes_gdf, edges_gdf)

    to_edit_gdf = nodes_gdf[nodes_gdf.nodeID.isin(list(to_edit.keys()))].copy()
    if len(to_edit_gdf) == 0:
        return nodes_gdf, edges_gdf

    for node in to_edit_gdf.itertuples():

        if node.Index in processed_nodes:
            continue
        tmp = edges_gdf[(edges_gdf['u'] == node.Index) |
                        (edges_gdf['v'] == node.Index)].copy()
        found = False
        not_a_roundabout = False
        sc, sc_last_vertex = None, None

        # take one of these lines and examine its relationship with the others at the same junction
        for row in tmp.itertuples():
            if row[ix_geo].length > max_length_segment:
                continue  # too long to be a roundabout segment
            sequence_nodes = [node.Index]
            sequence_segments = [row.Index]
            if row.Index in processed_segments:
                continue

            if row[ix_u] == node.Index:
                last_vertex = row[ix_v]
            else:
                last_vertex = row[ix_u]

            sequence_nodes.append(last_vertex)
            segment = row
            distance = 0
            second_candidate = False

            while not found:
                if distance >= 400:
                    break  # too much traversed distance for a roundabout
                if last_vertex in processed_nodes:  # the node has been dissolved already
                    if not second_candidate:
                        break
                    distance -= segment[ix_geo].length
                    segment = sc
                    distance += segment[ix_geo].length
                    sequence_segments[-1] = segment[0]
                    last_vertex = sc_last_vertex
                    sequence_nodes[-1] = sc_last_vertex
                    second_candidate = False
                    continue

                possible_connectors = edges_gdf[
                    (edges_gdf['u'] == last_vertex) |
                    (edges_gdf['v'] == last_vertex)].copy()
                for connector in possible_connectors.itertuples():

                    if (segment[0] == connector.Index) | (
                            connector.Index in processed_segments):
                        possible_connectors.drop(connector.Index,
                                                 axis=0,
                                                 inplace=True)
                    elif connector[ix_geo].length > max_length_segment:
                        possible_connectors.drop(connector.Index,
                                                 axis=0,
                                                 inplace=True)
                    else:
                        angle = angle_line_geometries(segment[ix_geo],
                                                      connector[ix_geo],
                                                      angular_change=True,
                                                      degree=True)
                        if angle > angle_tolerance:
                            possible_connectors.drop(connector.Index,
                                                     axis=0,
                                                     inplace=True)
                        else:
                            possible_connectors.at[connector.Index,
                                                   'angle'] = angle

                if (len(possible_connectors) == 0) | (last_vertex
                                                      in processed_nodes):
                    if not second_candidate:
                        break
                    else:
                        distance -= segment[ix_geo].length
                        segment = sc
                        distance += segment[ix_geo].length
                        sequence_segments[-1] = segment[0]
                        last_vertex = sc_last_vertex
                        sequence_nodes[-1] = sc_last_vertex
                        second_candidate = False
                        continue

                else:
                    possible_connectors.sort_values(by='angle',
                                                    ascending=True,
                                                    inplace=True)

                segment = list(possible_connectors.iloc[0])
                segment.insert(0, possible_connectors.iloc[0].name)

                if len(possible_connectors) > 1:
                    sc = list(possible_connectors.iloc[1])
                    sc.insert(0, possible_connectors.iloc[1].name)
                    second_candidate = True
                    if sc[ix_u] == last_vertex:
                        sc_last_vertex = sc[ix_v]
                    else:
                        sc_last_vertex = sc[ix_u]

                if segment[ix_u] == last_vertex:
                    last_vertex = segment[ix_v]
                else:
                    last_vertex = segment[ix_u]
                sequence_nodes.append(last_vertex)
                sequence_segments.append(segment[0])
                distance += segment[ix_geo].length

                if last_vertex == node.Index:
                    lm = linemerge(edges_gdf.loc[i].geometry
                                   for i in sequence_segments)
                    roundabout = polygonize_full(lm)[0]
                    if len(roundabout) == 0:
                        not_a_roundabout = True
                        break

                    centroid = roundabout.centroid
                    distances = [
                        nodes_gdf.loc[i].geometry.distance(centroid)
                        for i in sequence_nodes
                    ]
                    shortest, longest, mean = min(distances), max(
                        distances), statistics.mean(distances)

                    if (shortest < mean * 0.80) | (longest > mean * 1.20):
                        not_a_roundabout = True
                        break

                    found = True
                    new_index = max(nodes_gdf.index) + 1

                    nodes_gdf.loc[new_index] = nodes_gdf.loc[node.Index]
                    nodes_gdf.at[new_index, 'nodeID'] = new_index
                    nodes_gdf.at[new_index, 'geometry'] = centroid
                    nodes_gdf.at[new_index, 'x'] = centroid.coords[0][0]
                    nodes_gdf.at[new_index, 'y'] = centroid.coords[0][1]
                    processed_segments = processed_segments + sequence_segments
                    processed_nodes = processed_nodes + sequence_nodes + [
                        new_index
                    ]
                    edges_gdf.loc[edges_gdf['u'].isin(sequence_nodes),
                                  'u'] = new_index
                    edges_gdf.loc[edges_gdf['v'].isin(sequence_nodes),
                                  'v'] = new_index
                    nodes_gdf.drop(sequence_nodes, axis=0, inplace=True)
                    edges_gdf.drop(sequence_segments, axis=0, inplace=True)

            if not_a_roundabout:
                break
            if found:
                break

    edges_gdf = correct_edges(nodes_gdf, edges_gdf)
    nodes_gdf, edges_gdf = clean_network(nodes_gdf,
                                         edges_gdf,
                                         dead_ends=True,
                                         remove_disconnected_islands=False,
                                         same_uv_edges=True,
                                         self_loops=True)

    return nodes_gdf, edges_gdf