Code Example #1
File: osm_xml.py  Project: hobama/V2V-OSM
def osm_net_download(polygon,
                     network_type='all_private',
                     timeout=180,
                     memory=None,
                     max_query_area_size=50 * 1000 * 50 * 1000):
    """Download OSM ways and nodes within a polygon from the Overpass API"""

    osm_filter = osmnx.get_osm_filter(network_type)
    response_xmls = []

    if memory is None:
        maxsize = ''
    else:
        maxsize = '[maxsize:{}]'.format(memory)

    geometry_proj, crs_proj = osmnx.project_geometry(polygon)
    geometry_proj_cons_subdiv = osmnx.consolidate_subdivide_geometry(
        geometry_proj, max_query_area_size=max_query_area_size)
    geometry, _ = osmnx.project_geometry(geometry_proj_cons_subdiv,
                                         crs=crs_proj,
                                         to_latlong=True)
    polygon_coord_strs = osmnx.get_polygons_coordinates(geometry)

    for polygon_coord_str in polygon_coord_strs:
        query_template = \
            '[out:xml][timeout:{timeout}]{maxsize};' + \
            '(way["highway"]{filters}(poly:"{polygon}");>;);out;'
        query_str = query_template.format(polygon=polygon_coord_str,
                                          filters=osm_filter,
                                          timeout=timeout,
                                          maxsize=maxsize)
        response_xml = overpass_request(data={'data': query_str},
                                        timeout=timeout)
        response_xmls.append(response_xml)
    return response_xmls
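
A minimal usage sketch, not part of the original file: it assumes `overpass_request` is defined in the same module (as the function above requires) and uses an illustrative lon/lat polygon.

from shapely.geometry import Polygon

# small illustrative area in lon/lat (WGS 84); coordinates are arbitrary
area = Polygon([(13.30, 52.50), (13.35, 52.50), (13.35, 52.53), (13.30, 52.53)])
xml_responses = osm_net_download(area, network_type='drive', timeout=60)
print('{} Overpass response(s) received'.format(len(xml_responses)))
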
Code Example #2
def get_bufferedstudyregion_polygon(filepath, buffer_dist=1e4, crs=None, to_crs=None, to_latlong=True):
    """
    Convert a GeoDataFrame from file to shapely polygon with the UTM zone appropriate for its geometries'
    centroid.
    
    Parameters
    ----------
    filepath : String
        the name of the shapefile path(including file extension)
    buffer_dist : float 
        distance to buffer around the place geometry, in meters
    crs : dict 
        the starting coordinate reference system of the passed-in geometry, 
        default value (None) will set settings.default_crs as the CRS
    to_crs : dict
        if not None, just project to this CRS instead of to UTM
    to_latlong : bool
        if True, projects to latlong instead of to UTM
    Returns
    -------
    tuple
        (geometry_proj, crs), the projected shapely geometry and the crs of the
        projected geometry
    """
    # load the shapefile
    shape = gpd.GeoDataFrame.from_file(filepath)
    # create a buffer around the first geometry
    polygon = shape['geometry'].iloc[0]
    buffer_polygon = polygon.buffer(buffer_dist)
    # project the buffered polygon from lat-long to UTM, or vice versa
    polygon_buffer_prof = ox.project_geometry(buffer_polygon, crs=crs, to_crs=to_crs, to_latlong=to_latlong)
    return polygon_buffer_prof
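
A usage sketch with a hypothetical shapefile path. Note that the buffer is applied in the units of the shapefile's own CRS, so a distance of 1e4 corresponds to metres only if that CRS is metre-based.

# 'data/study_region.shp' is a placeholder path, not a file from the project
study_polygon, study_crs = get_bufferedstudyregion_polygon('data/study_region.shp',
                                                           buffer_dist=1e4)
print(study_polygon.bounds)
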
Code Example #3
def analyze_city(boundary, crs, local_edges):
    """Analyze correspondence between Local and OSM bikeways throughout a city.

    Parameters
    ----------
    boundary : :class:`shapely.geometry.Polygon`
        City boundary projected in WGS 84

    crs : epsg coordinate system 
        Local coordinate system in meters (e.g., UTM 10: {'init': 'epsg:26910'})

    local_edges : :class:`geopandas.GeoDataFrame`
        Output from `structure_bikeways_shapefile`

    Returns
    -------
    :obj:`tuple`
        * :class:`pandas.DataFrame`
            Output from `summarize_bikeway_correspondance`
        * :class:`geopandas.GeoDataFrame`
            OSM edges with OSM and local bikeway data attached
    """

    # Define OSM tag filter
    # Importantly, no paths with 'highway':'service' or 'highway':'motorway' tags will be returned
    tag_filter = (
        '["area"!~"yes"]["highway"!~"service|footway|motor|proposed|construction|abandoned|platform|raceway"]'
        '["bicycle"!~"no"]["access"!~"private"]')

    # Download the OSM data
    overpass_jsons = ox.osm_net_download(boundary, custom_filter=tag_filter)
    overpass_json = sp.merge_overpass_jsons(overpass_jsons)

    # Define bikeway columns and associated labels
    bikeway_types = [
        'off_street_path', 'bike_blvd', 'separated_bike_lane', 'bike_lane',
        'shoulder', 'sharrow', 'bike_route'
    ]

    # Parse Overpass JSON into bikeway types
    overpass_parsed = sp.parse_osm_tags(overpass_json,
                                        bikeway_types,
                                        true_value=1,
                                        false_value=0,
                                        none_value=np.nan)

    # Specify attributes to include in graph
    path_tags = (bikeway_types + ['highway'])
    ox.config(useful_tags_path=path_tags)
    # Convert json to graph
    G = ox.create_graph([overpass_parsed])
    # Simplify the graph by removing all nodes that are not intersections or dead ends
    G = ox.simplify_graph(G, strict=True)
    # Make graph undirected
    G = nx.to_undirected(G)
    # Convert graph to geodataframes
    osm_edges = ox.graph_to_gdfs(G, nodes=False)
    # Project to local coordinate system
    osm_edges = osm_edges.to_crs(crs)

    # Project city boundary to local coordinate system
    boundary, _ = ox.project_geometry(boundary, to_crs=crs)

    # Constrain edges to those intersecting the city boundary polygon
    osm_edges = sp.gdf_intersecting_polygon(osm_edges, boundary)

    # Summarize bikeway values stored in lists
    osm_edges[bikeway_types] = osm_edges[bikeway_types].applymap(
        lambda x: sp.nan_any(x, 1, np.nan))

    # Identify the largest available highway type
    def largest_highway(highways):
        # Specify highway order,
        # largest (least bikeable) to smallest (most bikeable)
        highway_order = [
            'trunk', 'primary', 'secondary', 'tertiary', 'unclassified',
            'residential', 'living_street', 'cycleway'
        ]
        highways = sp.listify(highways)
        # Strip '_link' from tags
        highways = [x[:-5] if x[-5:] == '_link' else x for x in highways]
        # If list includes one of these tags, return the biggest one
        ranked_highways = [x for x in highways if x in highway_order]
        if len(ranked_highways) > 0:
            ranks = [highway_order.index(x) for x in ranked_highways]
            return highway_order[min(ranks)]
        # Otherwise, return 'other'
        else:
            return 'other'

    osm_edges['highway'] = osm_edges['highway'].apply(largest_highway)

    # Restrict edges to bikeable highway types
    bikable = [
        'primary', 'secondary', 'tertiary', 'unclassified', 'residential',
        'living_street', 'cycleway'
    ]
    osm_edges = osm_edges[osm_edges['highway'].isin(bikable)].copy()

    # Project local edges to local coordinate system
    local_edges = local_edges.to_crs(crs)

    # Restrict to local edges intersecting the city boundary
    local_edges = sp.gdf_intersecting_polygon(local_edges, boundary)

    # Match local edges to OSM edges
    analysis_columns = bikeway_types + ['geometry']
    # Match dataframes
    osm_matches = sp.match_lines_by_hausdorff(
        sp.select_columns(
            osm_edges, analysis_columns,
            suffix='_osm').rename(columns={'geometry_osm': 'geometry'}),
        sp.select_columns(
            local_edges, analysis_columns,
            suffix='_local').rename(columns={'geometry_local': 'geometry'}),
        constrain_target_features=True,
        distance_tolerance=20,
        azimuth_tolerance=20,
        match_fields=True)

    # Identify local and osm bikeway columns
    joint_bikeway_cols = [
        column for column in osm_matches.columns
        if any(bikeway in column for bikeway in bikeway_types)
    ]

    # Reduce lists to a single binary value
    osm_matches[joint_bikeway_cols] = osm_matches[joint_bikeway_cols].applymap(
        lambda x: sp.nan_any(x, 1, np.nan))

    # Drop records without a bikeway in either dataset
    osm_matches = osm_matches.dropna(how='all', subset=joint_bikeway_cols)

    # Reclassify NaN values as 0
    osm_matches = osm_matches.fillna(0)

    # Function to calculate composite bikeways
    def composite_columns(matches, columns, suffix):
        # Select relevant columns
        relevant_columns = sp.select_columns(matches,
                                             [x + suffix for x in columns])
        # Assess whether there are any values of 1 across each row
        return relevant_columns.apply(lambda x: sp.nan_any(x, 1, 0), axis=1)

    # Define exclusive and shared bikeway types
    exclusive_bikeways = ['bike_lane', 'separated_bike_lane']
    shared_bikeways = ['bike_blvd', 'sharrow', 'bike_route']

    # Calculate composite of exclusive bikeways
    osm_matches['exclusive_bikeway_osm'] = composite_columns(
        osm_matches, exclusive_bikeways, '_osm')
    osm_matches['exclusive_bikeway_local'] = composite_columns(
        osm_matches, exclusive_bikeways, '_local')

    # Calculate composite of shared bikeways
    osm_matches['shared_bikeway_osm'] = composite_columns(
        osm_matches, shared_bikeways, '_osm')
    osm_matches['shared_bikeway_local'] = composite_columns(
        osm_matches, shared_bikeways, '_local')

    # Calculate composite of all bikeways
    osm_matches['any_bikeway_osm'] = composite_columns(osm_matches,
                                                       bikeway_types, '_osm')
    osm_matches['any_bikeway_local'] = composite_columns(
        osm_matches, bikeway_types, '_local')

    # Calculate the length of each edge
    osm_matches['length'] = osm_matches['geometry'].apply(lambda x: x.length)

    # Add labels to bikeway types
    bikeway_labels = [
        'Off Street Path', 'Bike Boulevard', 'Separated Bike Lane',
        'Bike Lane', 'Shoulder', 'Sharrow', 'Bike Route'
    ]
    bikeway_labels = OrderedDict(zip(bikeway_types, bikeway_labels))
    # Add labels for composite bikeway types
    bikeway_labels.update({'exclusive_bikeway': 'Exclusive'})
    bikeway_labels.update({'shared_bikeway': 'Shared'})
    bikeway_labels.update({'any_bikeway': 'Any'})

    # Calculate summaries
    summaries = summarize_bikeway_correspondance(osm_matches, bikeway_labels)

    return summaries, osm_matches
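
A usage sketch with hypothetical input files; `analyze_city` also relies on the project's `sp` helpers and `summarize_bikeway_correspondance`, which are not shown here. The local bikeway layer is assumed to already have the structure produced by `structure_bikeways_shapefile`.

import geopandas as gpd

# placeholder paths; the boundary must be in WGS 84 (lat-long)
boundary = gpd.read_file('data/city_boundary.geojson').geometry.iloc[0]
local_edges = gpd.read_file('data/local_bikeways.geojson')

summary_table, matched_edges = analyze_city(boundary,
                                            crs={'init': 'epsg:26910'},
                                            local_edges=local_edges)
print(summary_table)
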
Code Example #4
ID_nodes_quartier = dict()
keys = [
    'n', 'm', 'k_avg', 'streets_per_node_avg', 'edge_length_total',
    'edge_length_avg', 'street_length_total', 'street_length_avg',
    'circuity_avg'
]
records = list()

for i in range(n_zone):
    # extract the polygon of the cadastral zone from its WKT string
    pol = zone.loc[i, 'WKT']
    polygon = loads(pol)
    g_zona = ox.graph_from_polygon(polygon,
                                   network_type='drive_service',
                                   retain_all=True)
    polygon, _ = ox.project_geometry(polygon, to_latlong=False)
    area = unary_union(polygon).area / 10**6
    try:
        stats = ox.basic_stats(g_zona)
        stats = dict((k, stats[k]) for k in keys)
        stats['area'] = area
        stats['Zona'] = zone.loc[i, 'Name'][18:]
        stats['avg_betweenness'] = mean(
            nx.betweenness_centrality(g_zona).values())
        stats['avg_closeness'] = mean(nx.closeness_centrality(g_zona).values())
        G = nx.DiGraph()
        for u, v, data in g_zona.edges(data=True):
            w = data['length']  # if 'length' in data else 1.0
            if not G.has_edge(u, v):
                G.add_edge(u, v, weight=w)
        stats['avg_clustering'] = mean(nx.clustering(G).values())
        records.append(stats)
    except Exception:
        # assumed completion so the try block is syntactically valid:
        # skip zones whose statistics could not be computed
        continue
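
A plausible follow-up, assumed rather than taken from the truncated snippet: gather the per-zone statistics collected in `records` into a pandas DataFrame for comparison.

import pandas as pd

# assumes each zone's stats dict has been appended to `records` as above
zone_stats = pd.DataFrame(records).set_index('Zona')
print(zone_stats[['n', 'm', 'area', 'avg_betweenness']].head())
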
Code Example #5
fig, ax = ox.plot_graph(g1,
                        node_color=cc,
                        node_edgecolor=cc,
                        fig_height=11,
                        fig_width=11,
                        show=False,
                        close=False,
                        node_size=4,
                        node_zorder=2,
                        edge_linewidth=0.4,
                        margin=0)

for i in range(n_quartieri):
    polygon = quartieri_napoli['geometry'].iloc[i]
    polygon, _ = ox.project_geometry(polygon,
                                     crs={'init': 'epsg:32633'},
                                     to_latlong=True)
    polypatch = PolygonPatch(polygon,
                             alpha=1,
                             zorder=2,
                             color='black',
                             fill=False)
    ax.add_patch(polypatch)

plt.title('Spinglass', {'fontsize': 23})
fig_file_name = os.path.join(picture_dir, 'splinglass')
plt.savefig(fig_file_name, bbox_inches='tight')

Code Example #6
def get_buildings(place, polygon):
    """
    Get Buildings Data

    Parameters
    ----------
    place :
      input place
    polygon :
      polygon for Buildings Data

    Returns
    Buildings Data
    """

    max_query_area_size = 3000000000
    maxsize = ''
    timeout = 180

    # requesting polygon geometry and projection
    geometry_proj, crs_proj = ox.project_geometry(polygon)
    # subdividing area if area is big and exceeds max_query_area_size
    geometry_proj_consolidated_subdivided = ox.consolidate_subdivide_geometry(
        geometry_proj, max_query_area_size=max_query_area_size)

    # getting geometry of subpart
    geometry, _ = ox.project_geometry(geometry_proj_consolidated_subdivided,
                                      crs=crs_proj,
                                      to_latlong=True)

    # get polygon coordinates
    polygon_coord_strs = ox.get_polygons_coordinates(geometry)

    print('Requesting building footprint data')
    start_time = time.time()

    # pass each polygon coordinates in the list to Overpass API
    response_jsons = []
    for polygon_coord_str in polygon_coord_strs:
        query_template = ('[out:json][timeout:{timeout}]{maxsize};(way'
                          '(poly:"{polygon}")["building"];(._;>;);relation'
                          '(poly:"{polygon}")["building"];(._;>;););out;')
        query_str = query_template.format(polygon=polygon_coord_str,
                                          timeout=timeout,
                                          maxsize=maxsize)
        # call overpass API with query
        response_json = call_overpass(data={'data': query_str})
        response_jsons.append(response_json)

    # collecting buildings: first node coordinates, then way footprints
    vertices = {}
    for response in response_jsons:
        for result in response['elements']:
            if 'type' in result and result['type'] == 'node':
                vertices[result['id']] = {
                    'lat': result['lat'],
                    'lon': result['lon']
                }

    buildings = {}
    for response in response_jsons:
        for result in response['elements']:
            if 'type' in result and result['type'] == 'way':
                nodes = result['nodes']
                try:
                    polygon = Polygon([(vertices[node]['lon'],
                                        vertices[node]['lat'])
                                       for node in nodes])
                except Exception:
                    print('Polygon has invalid geometry: {}'.format(nodes))
                    continue
                building = {'nodes': nodes, 'geometry': polygon}

                if 'tags' in result:
                    for tag in result['tags']:
                        building[tag] = result['tags'][tag]

                buildings[result['id']] = building

    # convert to a GeoDataFrame
    df_building = gpd.GeoDataFrame(buildings).T
    df_building.crs = {'init': 'epsg:4326'}

    # drop all invalid geometries
    df_building = df_building[df_building['geometry'].is_valid]

    df_building["osm_id"] = df_building.index
    df_building.reset_index(drop=True, inplace=True)
    df_building.gdf_name = (str(place['state']) + '_buildings'
                            if place['state'] is not None else 'buildings')
    columns_of_interest = [
        "amenity", "landuse", "leisure", "shop", "man_made", "building",
        "building:use", "building:part", "osm_id", "geometry", "height_tags"
    ]

    # drop all unused columns
    df_building.drop([
        col
        for col in list(df_building.columns) if col not in columns_of_interest
    ],
                     axis=1,
                     inplace=True)
    return df_building
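
A usage sketch with hypothetical inputs; `call_overpass` must be provided by the surrounding module, exactly as `get_buildings` above expects.

from shapely.geometry import box

place = {'state': 'Berlin'}                 # illustrative place metadata
aoi = box(13.37, 52.51, 13.40, 52.53)       # small lon/lat bounding box
buildings = get_buildings(place, aoi)
print('{} building footprints downloaded'.format(len(buildings)))
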
Code Example #7
def get_poi_data(place, polygon):
    """
    Get POI Data

    Parameters
    ----------
    place :
      input place
    polygon :
      polygon for POI Data

    Returns
    POI Data
    """

    max_query_area_size = 3000000000
    maxsize = ''
    timeout = 180

    # requesting polygon geometry and projection
    geometry_proj, crs_proj = ox.project_geometry(polygon)

    # subdividing area if area is big and exceeds max_query_area_size
    geometry_proj_consolidated_subdivided = ox.consolidate_subdivide_geometry(
        geometry_proj, max_query_area_size=max_query_area_size)

    # getting geometry of subpart
    geometry, _ = ox.project_geometry(geometry_proj_consolidated_subdivided,
                                      crs=crs_proj,
                                      to_latlong=True)

    # get polygon coordinates
    polygon_coord_strs = ox.get_polygons_coordinates(geometry)

    print('Requesting POI data')
    start_time = time.time()

    # pass each polygon coordinates in the list to Overpass API
    response_jsons = []
    for polygon_coord_str in polygon_coord_strs:
        query_template = ('[out:json][timeout:{timeout}]{maxsize};('
                          '(node["office"](poly:"{polygon}"););'
                          '(node["shop"](poly:"{polygon}"););'
                          '(node["amenity"](poly:"{polygon}"););'
                          '(node["leisure"](poly:"{polygon}"););'
                          '(node["building"](poly:"{polygon}"););'
                          '(node["sport"](poly:"{polygon}");););out;')
        query_str = query_template.format(polygon=polygon_coord_str,
                                          timeout=timeout,
                                          maxsize=maxsize)
        # call overpass API with query
        response_json = call_overpass(data={'data': query_str})
        response_jsons.append(response_json)

    # collecting POIs from the node elements
    vertices = {}
    for response in response_jsons:
        for result in response['elements']:
            if 'type' in result and result['type'] == 'node':

                point = Point(result['lon'], result['lat'])

                POI = {'geometry': point}

                if 'tags' in result:
                    for tag in result['tags']:
                        POI[tag] = result['tags'][tag]

                vertices[result['id']] = POI

    # convert to a GeoDataFrame
    df_poi = gpd.GeoDataFrame(vertices).T
    df_poi.crs = {'init': 'epsg:4326'}

    try:
        # drop all invalid geometries
        df_poi = df_poi[df_poi['geometry'].is_valid]
    except BaseException:
        # Empty data frame
        # Create one-row data frame with null information
        point = polygon.centroid
        data = {"geometry": [point], "osm_id": [0]}
        df_poi = gpd.GeoDataFrame(data, crs={'init': 'epsg:4326'})

    df_poi["osm_id"] = df_poi.index
    df_poi.reset_index(drop=True, inplace=True)
    df_poi.gdf_name = (str(place['state']) + '_points'
                       if place['state'] is not None else 'points')
    columns_of_interest = [
        "amenity", "landuse", "leisure", "shop", "man_made", "building",
        "building:use", "building:part", "osm_id", "geometry"
    ]

    # drop all unused columns
    df_poi.drop([
        col for col in list(df_poi.columns) if col not in columns_of_interest
    ],
                axis=1,
                inplace=True)
    return df_poi
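
The same kind of sketch for `get_poi_data`, under the same assumptions (hypothetical place and polygon, `call_overpass` available in the module).

from shapely.geometry import box

place = {'state': 'Berlin'}
aoi = box(13.37, 52.51, 13.40, 52.53)
pois = get_poi_data(place, aoi)
print('{} points of interest downloaded'.format(len(pois)))
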
Code Example #8
        with zipfile.ZipFile(counties_shapefile_zip, 'r') as zip_file:
            zip_file.extractall(counties_shapefile_dir)
    os.remove(counties_shapefile_zip)

counties = gpd.read_file(counties_shapefile_dir)

# retain only those tracts that are in the bay area counties
mask = (counties['STATEFP'] == '06') & (counties['COUNTYFP'].isin(
    bayarea.values()))
gdf_bay = counties[mask]

bayarea_polygon = gdf_bay.unary_union

# get the convex hull, otherwise we'll cut out bridges over the bay
bayarea_polygon = bayarea_polygon.convex_hull
bayarea_polygon_proj, crs = ox.project_geometry(bayarea_polygon)

# get the unsimplified graph for the drive network
G = ox.graph_from_polygon(bayarea_polygon,
                          network_type='drive',
                          simplify=False)

# filter way types
types = [
    'motorway', 'motorway_link', 'trunk', 'trunk_link', 'primary',
    'primary_link', 'secondary', 'secondary_link', 'tertiary', 'tertiary_link',
    'unclassified', 'road'
]

minor_streets = [(u, v, k) for u, v, k, d in G.edges(keys=True, data=True)
                 if d['highway'] not in types]
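
The fragment ends after collecting the minor-street edges; a plausible continuation, not shown in the original, is to drop those edges and any nodes they leave isolated so that only the major-road network remains.

import networkx as nx

# assumed continuation: drop the minor edges and any nodes they isolate
G.remove_edges_from(minor_streets)
G.remove_nodes_from(list(nx.isolates(G)))
print('{} nodes and {} edges remain'.format(len(G.nodes()), len(G.edges())))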