Example #1
})  # initialise groundwater object
# Notes: use list(surfacewater.__dict__) to list all the arguments of the namespace.
surfacewater.poly = Polygon(
    shell=[(0, 0), (0, -0.9), (5, -0.9), (30, -6.01), (35, -6.01), (35, 0),
           (0, 0)])
# Notes: use list(groundwater.__dict__) to list all the arguments of the namespace.
groundwater.poly = Polygon(
    shell=[(0, -0.9), (5, -0.9), (30, -6.01), (35, -6.01), (35, -20),
           (0, -20), (0, -0.9)])

ls1 = LineString([(5, -0.9), (30, -6.01)])
ls2 = LineString([(30, -6.01), (35, -6.01)])
beach.polyline = MultiLineString(lines=[ls1, ls2])

print('working on surface water element and groundwater element ...')
t = time.time()
area_threshold_sw_gw_elements_m2 = 0.10  #0.11 #0.12
line_threshold_beach_m = 0.51  #0.5 #0.49 #0.47 #0.46 #0.47 # 0.48 # 0.49 # 0.5 # 0.45 #.55
area_threshold_gw_connection_m2 = 0  #0.0001 #0.001 #0.002 some horizontal cells are still being treated as surface water cells
area_threshold_sw_connection_m2 = 0.004  #0.003 leaves some vertical connections that should ideally be groundwater #0.004 too much #0.001

surfacewater.element.result = intersect(
    element_cells,
    surfacewater.poly,
    shptype="POLYGON",
    length_area_greater_than=area_threshold_sw_gw_elements_m2)
#groundwater.element.result = intersect(element_cells, groundwater.poly, shptype="POLYGON", length_area_greater_than=area_threshold_sw_gw_elements_m2)
Example #2
#method 2
def get_minc_division_prediction(boundaryVerts,
                                 beta=1,
                                 numMTs=100,
                                 spindleLen='default',
                                 plot=1):
    """ Implement "Minc" spindle orientation model.
    Params:

    boundaryVerts (list): A list of (x,y) coords representing the cell boundary pixels.

    beta (double): Force-length power law scaling for MT forces: f = l^beta, where l=length of MT.

    numMTs (int): How many MTs are placed around the cell (uniformly distributed).

    spindleLen (double/string): Size of the spindle (distance between centrosomes). Can manually set a double, or pass "default" to use sqrt(area/pi).

    plot (bool): To plot results, or not.
    """

    # Candidate spindle angles to scan over.
    spindleAngles = np.linspace(-np.pi / 2, np.pi / 2, 100)
    # Define how far the initial MT lines should extend (they just need to
    # cross the cortex so we can measure the distance to the crossing).
    MTexpansionLen = 100
    numMTs = 100  # NOTE: overrides the numMTs argument; the analysis below assumes it equals len(spindleAngles)
    # # Make some generic cell shapes for testing:
    # # polygon = Polygon([(0, 0), (1, 0), (1.5, 0.5), (1, 1), (0, 1)])
    # # polygon = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]) # Square
    # polygon = Polygon([(0, 0), (2, 0), (2, 1), (0, 1)]) # Rectangle
    # polygon = Polygon([(0, 0), (2, 0), (1, 1)]) # Triangle
    # # Hexagons
    # # Stretched
    # xs = 1 + np.cos(np.linspace(0, 2*np.pi, 100))
    # ys = 1 + 0.5*np.sin(np.linspace(0, 2*np.pi, 100))
    # # Regular
    # xs = np.cos((np.pi * np.array([0, 1, 2, 3, 4, 5]))/3)
    # ys = np.sin((np.pi * np.array([0, 1, 2, 3, 4, 5]))/3)
    # polygon = Polygon([i for i in zip(xs, ys)])

    # Polygon geometry:
    polygon = Polygon(boundaryVerts)
    centroid = polygon.centroid.coords[:][0]
    area = polygon.area

    if spindleLen == 'default':
        spindleLen = np.sqrt(area / np.pi)  # Set spindle len for circle
    # spindleLen = 1.95

    torqueList = []
    for spindleAngle in spindleAngles:

        # Centrosome position
        cSome1 = (centroid[0] - 0.5 * spindleLen * np.cos(spindleAngle),
                  centroid[1] - 0.5 * spindleLen * np.sin(spindleAngle))
        cSome2 = (centroid[0] + 0.5 * spindleLen * np.cos(spindleAngle),
                  centroid[1] + 0.5 * spindleLen * np.sin(spindleAngle))

        # PLOTTING
        if plot:
            fig = plt.figure(1)
            ax = fig.add_subplot(111)
            ax.cla()

            # Centrosomes
            ax.plot([cSome1[0], cSome2[0]], [cSome1[1], cSome2[1]],
                    color='tomato',
                    alpha=0.8,
                    linewidth=3,
                    solid_capstyle='round',
                    zorder=2)
            ax.plot(cSome1[0],
                    cSome1[1],
                    'o',
                    color='tomato',
                    alpha=0.8,
                    linewidth=3,
                    solid_capstyle='round',
                    zorder=3)
            ax.plot(cSome2[0],
                    cSome2[1],
                    'o',
                    color='tomato',
                    alpha=0.8,
                    linewidth=3,
                    solid_capstyle='round',
                    zorder=3)
            # Outline
            x, y = polygon.exterior.xy
            ax.plot(x,
                    y,
                    color='#6699cc',
                    alpha=0.7,
                    linewidth=3,
                    solid_capstyle='round',
                    zorder=2)
            ax.set_title('Single Cell Spindle')

        # TODO: cSome1 needs to be to the left of cSome2
        # Loop over a range of angles to get sum of MT forces at each angle.
        torque1, torque2 = 0, 0
        MTangleList = np.linspace(-np.pi / 2, np.pi / 2, numMTs)
        for phi in MTangleList:

            angle = phi + spindleAngle
            #############################
            # Create a MT
            MT1 = LineString([
                cSome1,
                (cSome1[0] - MTexpansionLen * np.cos(angle),
                 cSome1[1] - MTexpansionLen * np.sin(angle))
            ])
            # Get the length of the microtubule from centrosome to the cortex.
            intersectLine = MT1.intersection(polygon)
            # If the cell is non-convex, there may be multiple crossings,
            # but we just want the first, so check and take it if so.
            if isinstance(intersectLine, MultiLineString):
                intersectLine = intersectLine.geoms[0]

            # Get the torque, if there was a line.
            if not intersectLine.is_empty:
                MTLen = intersectLine.length

                # Store the torque generated by the MT
                torque1 += 0.5 * spindleLen * np.sin(phi) * (MTLen**beta)

                if plot:
                    # Plot MTs
                    x1, y1 = intersectLine.xy
                    ax.plot(x1,
                            y1,
                            alpha=0.7,
                            color='limegreen',
                            linewidth=1,
                            solid_capstyle='round',
                            zorder=2)
            else:
                torque1 += np.nan

            #############################

            # Create a MT
            MT2 = LineString([
                cSome2,
                (cSome2[0] + MTexpansionLen * np.cos(angle),
                 cSome2[1] + MTexpansionLen * np.sin(angle))
            ])
            # Get the length of the microtubule from centrosome to the cortex.
            intersectLine = MT2.intersection(polygon)
            if isinstance(intersectLine, MultiLineString):
                intersectLine = intersectLine.geoms[0]

            if not intersectLine.is_empty:
                MTLen = intersectLine.length

                # Store the torque generated by the MT
                torque2 += 0.5 * spindleLen * np.sin(phi) * (MTLen**beta)

                if plot:
                    x1, y1 = intersectLine.xy
                    ax.plot(x1,
                            y1,
                            alpha=0.7,
                            color='limegreen',
                            linewidth=1,
                            solid_capstyle='round',
                            zorder=2)
            else:
                torque2 += np.nan

        # Update the torque
        torque = torque1 + torque2
        torqueList.append(torque)

        if plot:
            plt.axis('equal')
            plt.tight_layout()

    # Plot the torques vs angle
    if plot:
        fig = plt.figure(2)
        ax = fig.add_subplot(111)
        ax.plot(spindleAngles, torqueList)
        ax.set_xlabel('Spindle Angle')
        ax.set_ylabel('Torque')
        ax.set_title("Torque vs Spindle Angle")
        plt.tight_layout()

        fig = plt.figure(3)
        ax = fig.add_subplot(111)

    torqueList = np.array(torqueList)

    # Propagate NaN into MTangleList so it can be split the same way as the torques
    MTangleList[np.isnan(torqueList)] = np.nan
    # Split arrays up between nan entries
    torqueListSplit = np.split(torqueList, np.where(np.isnan(torqueList))[0])
    MTangleListSplit = np.split(MTangleList,
                                np.where(np.isnan(MTangleList))[0])
    # removing NaN entries
    torqueListSplit = [
        ev[~np.isnan(ev)] for ev in torqueListSplit if not np.isnan(ev).all()
    ]
    MTangleListSplit = [
        ev[~np.isnan(ev)] for ev in MTangleListSplit if not np.isnan(ev).all()
    ]
    # removing arrays that are empty or have a single entry
    torqueListSplit = [ev for ev in torqueListSplit if ev.size > 1]
    MTangleListSplit = [ev for ev in MTangleListSplit if ev.size > 1]

    # For each sublist, get the minima.
    minima = []
    for i in range(0, len(torqueListSplit)):
        if len(torqueListSplit[i]) > 2:
            # Interpolate a spline
            spline = interpolate.InterpolatedUnivariateSpline(
                MTangleListSplit[i], torqueListSplit[i])
            # Get the roots
            xp = spline.roots()
            # minima.extend(xp)
            # Check if the torque was decreasing, indicating a minimum.
            tempMinima = []
            for root in xp:
                if spline(root - .01) > spline(root + .01):
                    tempMinima.append(root)

            # Get the derivative
            deriv = spline.derivative()
            # If there were multiple candidate minima, choose the most stable one (most negative slope)
            if len(tempMinima) > 1:
                # Get values of deriv:
                derivVals = deriv(tempMinima)
                # Select the one with smallest deriv.
                minima.append(tempMinima[np.argmin(derivVals)])
            elif len(tempMinima) == 1:
                minima.append(tempMinima[0])

            if plot:
                xs = np.linspace(MTangleListSplit[i][0],
                                 MTangleListSplit[i][-1], 1000)
                # plt.plot(MTangleListSplit[i], torqueListSplit[i])
                plt.plot(xs, spline(xs))
                plt.plot(xp, [0] * xp.size, 'o')
                plt.plot(xs, deriv(xs))

    if plot:
        plt.tight_layout()

    # If there were no minima, just get the smallest torque
    if len(minima) == 0:
        MTangleList = MTangleList[~np.isnan(torqueList)]
        torqueList = torqueList[~np.isnan(torqueList)]
        index = np.argmin(np.abs(torqueList))
        minima = [MTangleList[index]]
    # If there were multiple minima, just take the first.
    if len(minima) > 1:
        print("Multiple minima")
    minimum = minima[0]

    if plot:
        plt.show()

    # This has found the angle of the spindle; now return the predicted
    # division angle.
    minimum = np.degrees(minimum)
    print(minimum)

    return minimum
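
# A hypothetical smoke test (a sketch, not part of the original source):
# for an elongated 2x1 rectangular cell the spindle should align with the
# long axis, so the returned angle should be close to 0 degrees.
rect = [(0, 0), (2, 0), (2, 1), (0, 1)]
angle = get_minc_division_prediction(rect, beta=1, numMTs=100, plot=0)
print('predicted division angle (deg):', angle)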
Example #3
def TMCIdentification2GMNSNodeLinkFiles(TMC_file,
                                        link_base):  #output:node_tmc,link_tmc
    '''reading link_base'''
    link_base = pd.read_csv(link_base, low_memory=False)
    link_base = link_base[link_base['link_type_name'].isin(
        ['motorway', 'trunk'])]
    link_base = link_base[~link_base['name'].isna()]
    # link_base = link_base[link_base['link_type_name'].isin(['motorway','trunk','primary','secondary'])]
    link_base = link_base.reset_index()
    link_base = link_base.drop(['index'], axis=1)
    '''convert link_base to MultiLineString'''
    multiline_string_base_list = []
    multiline_string_base_list_sub = []
    for j in link_base.index:
        link_base_geometry_list = link_base.loc[j,
                                                'geometry'][12:-1].split(", ")
        for link_base_geometry in link_base_geometry_list:
            multiline_string_base_list_sub.append(
                (float(link_base_geometry.split(" ")[0]),
                 float(link_base_geometry.split(" ")[1])))
        multiline_string_base_list_sub = tuple(multiline_string_base_list_sub)
        multiline_string_base_list.append(multiline_string_base_list_sub)
        multiline_string_base_list_sub = []

    from shapely.geometry import MultiLineString
    line_base = MultiLineString(multiline_string_base_list)
    '''reading tmc'''
    tmc = pd.read_csv(TMC_file)
    tmc = tmc.drop_duplicates(subset=['direction', 'road_order']).sort_values(
        by=['direction', 'road_order'])
    tmc = tmc.reset_index()
    tmc = tmc.drop(['index'], axis=1)
    origin_tmc_num = len(tmc)
    '''remove out boundary tmc'''
    in_bbox_index_list = []
    for i in tmc.index:
        if (tmc['start_longitude'][i] > line_base.bounds[0]) & (tmc['start_longitude'][i] < line_base.bounds[2]) & \
            (tmc['end_longitude'][i] > line_base.bounds[0]) & (tmc['end_longitude'][i] < line_base.bounds[2]) & \
                (tmc['start_latitude'][i] > line_base.bounds[1]) & (tmc['start_latitude'][i] < line_base.bounds[3]) & \
            (tmc['end_latitude'][i] > line_base.bounds[1]) & (tmc['end_latitude'][i] < line_base.bounds[3]):
            in_bbox_index_list.append(i)

    tmc = tmc.loc[in_bbox_index_list]
    tmc = tmc.reset_index()
    tmc = tmc.drop(['index'], axis=1)
    if len(in_bbox_index_list) < origin_tmc_num:
        print('base map cannot cover all TMC nodes, ' +
              str(origin_tmc_num - len(in_bbox_index_list)) +
              ' tmc nodes are outside the bounding box; please use a larger base map')
    '''build node.csv'''
    print('converting tmc data into gmns format...')
    p = 1

    node_tmc = pd.DataFrame()
    node_tmc['name'] = None
    node_tmc['x_coord'] = None
    node_tmc['y_coord'] = None
    node_tmc['z_coord'] = None
    node_tmc['node_type'] = None
    node_tmc['ctrl_type'] = None
    node_tmc['zone_id'] = None
    node_tmc['geometry'] = None

    for i in range(0, len(tmc) - 1):
        if tmc.loc[i + 1, 'road_order'] > tmc.loc[i, 'road_order']:
            node_tmc = node_tmc.append({'name': tmc.loc[i,'tmc'],\
                                        'x_coord': tmc.loc[i,'start_longitude'], \
                                        'y_coord': tmc.loc[i,'start_latitude'],\
                                        'z_coord': None,\
                                        'node_type': 'tmc_start',\
                                        'ctrl_type': None,\
                                        'zone_id': None,\
                                        'geometry': "POINT (" + tmc.loc[i,'start_longitude'].astype(str) + " " + tmc.loc[i,'start_latitude'].astype(str) +")"}, ignore_index=True)
        else:
            node_tmc = node_tmc.append({'name': tmc.loc[i,'tmc'],\
                                        'x_coord': tmc.loc[i,'start_longitude'], \
                                        'y_coord': tmc.loc[i,'start_latitude'],\
                                        'z_coord': None,\
                                        'node_type': 'tmc_start',\
                                        'ctrl_type': None,\
                                        'zone_id': None,\
                                        'geometry': "POINT (" + tmc.loc[i,'start_longitude'].astype(str) + " " + tmc.loc[i,'start_latitude'].astype(str) +")"}, ignore_index=True)
            node_tmc = node_tmc.append({'name': tmc.loc[i,'tmc']+'END',\
                                        'x_coord': tmc.loc[i,'end_longitude'], \
                                        'y_coord': tmc.loc[i,'end_latitude'],\
                                        'z_coord': None,\
                                        'node_type': 'tmc_end',\
                                        'ctrl_type': None,\
                                        'zone_id': None,\
                                        'geometry': "POINT (" + tmc.loc[i,'end_longitude'].astype(str) + " " + tmc.loc[i,'end_latitude'].astype(str) +")"}, ignore_index=True)

        if i > p / 10 * len(tmc):
            print(str(p * 10) + "%" + ' nodes completed!')
            p = p + 1

    node_tmc = node_tmc.append({'name': tmc.loc[i+1,'tmc'],\
                                        'x_coord': tmc.loc[i+1,'start_longitude'], \
                                        'y_coord': tmc.loc[i+1,'start_latitude'],\
                                        'z_coord': None,\
                                        'node_type': 'tmc_start',\
                                        'ctrl_type': None,\
                                        'zone_id': None,\
                                        'geometry': "POINT (" + tmc.loc[i+1,'start_longitude'].astype(str) + " " + tmc.loc[i+1,'start_latitude'].astype(str) +")"}, ignore_index=True)

    node_tmc = node_tmc.append({'name': tmc.loc[i+1,'tmc']+'END',\
                                        'x_coord': tmc.loc[i+1,'end_longitude'], \
                                        'y_coord': tmc.loc[i+1,'end_latitude'],\
                                        'z_coord': None,\
                                        'node_type': 'tmc_end',\
                                        'ctrl_type': None,\
                                        'zone_id': None,\
                                        'geometry': "POINT (" + tmc.loc[i+1,'end_longitude'].astype(str) + " " + tmc.loc[i+1,'end_latitude'].astype(str) +")"}, ignore_index=True)

    node_tmc.index.name = 'node_id'

    node_tmc.index += 100000001  # shift the 0-based index to start at 100000001

    node_tmc.to_csv('node_tmc.csv')
    print('node_tmc.csv (' + str(len(node_tmc)) + ' nodes' + ') generated!')
    '''build link_tmc.csv'''
    p = 1
    link_tmc = pd.DataFrame()
    link_tmc['name'] = None
    link_tmc['corridor_id'] = None
    link_tmc['corridor_link_order'] = None
    link_tmc['from_node_id'] = None
    link_tmc['to_node_id'] = None
    link_tmc['directed'] = None
    link_tmc['geometry_id'] = None
    link_tmc['geometry'] = None
    link_tmc['dir_flag'] = None
    link_tmc['length'] = None
    link_tmc['grade'] = None
    link_tmc['facility_type'] = None
    link_tmc['capacity'] = None
    link_tmc['free_speed'] = None
    link_tmc['lanes'] = None

    for i in range(0, len(tmc)):
        link_tmc = link_tmc.append({'name': tmc.loc[i,'tmc'],\
                                    'corridor_id': tmc.loc[i,'road']+'_'+tmc.loc[i,'direction'],\
                                    'corridor_link_order' : tmc.loc[i,'road_order'],\
                                    'from_node_id': node_tmc[(node_tmc['x_coord']==tmc.loc[i,'start_longitude']) & (node_tmc['y_coord']==tmc.loc[i,'start_latitude'])].index.values[0], \
                                    'to_node_id': node_tmc[(node_tmc['x_coord']==tmc.loc[i,'end_longitude']) & (node_tmc['y_coord']==tmc.loc[i,'end_latitude'])].index.values[0],\
                                    'directed': 1,\
                                    'geometry_id': None,\
                                    'geometry': "LINESTRING (" + tmc.loc[i,'start_longitude'].astype(str) + " " + tmc.loc[i,'start_latitude'].astype(str) + "," +\
                                        tmc.loc[i,'end_longitude'].astype(str) +" "+ tmc.loc[i,'end_latitude'].astype(str) + ")",\
                                    'dir_flag': 1,\
                                    'length': tmc.loc[i,'miles'],\
                                    'grade': None,\
                                    'facility_type': 'interstate' if tmc.loc[i,'road'][0] == 'I' else None,\
                                    'capacity':None,\
                                    'free_speed':None,\
                                    'lanes': None}, ignore_index=True)

        if i > p / 10 * len(tmc):
            print(str(p * 10) + "%" + ' links completed!')
            p = p + 1

    link_tmc.index.name = 'link_id'
    link_tmc.index += 100000001

    link_tmc.to_csv('link_tmc.csv')

    print('link_tmc.csv (' + str(len(link_tmc)) + ' links' + ') generated!')
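
# A hypothetical invocation (file names are placeholders): reads the TMC
# table and the GMNS link base, then writes node_tmc.csv and link_tmc.csv
# to the working directory.
TMCIdentification2GMNSNodeLinkFiles('TMC_Identification.csv', 'link.csv')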
Example #4
    def test_publish_to_postgis(self):
        CsvDataProvider.register_data_provider('csv_provider')
        csv_provider = CsvDataProvider(
            'csv_provider',
            occurrence_csv_path=TEST_OCCURRENCE_CSV,
        )
        csv_provider.sync()
        with Connector.get_connection() as connection:
            sel = select([
                meta.occurrence.c.id.label('id'),
                meta.occurrence.c.taxon_id.label('taxon_id'),
                cast(meta.taxon.c.rank.label('rank'), String).label('rank'),
                meta.taxon.c.full_name.label('full_name'),
                cast(meta.occurrence.c.location, String).label('location'),
            ]).select_from(
                meta.occurrence.outerjoin(
                    meta.taxon, meta.taxon.c.id == meta.occurrence.c.taxon_id))
            df = gpd.read_postgis(sel,
                                  connection,
                                  index_col='id',
                                  geom_col='location',
                                  crs='+init=epsg:4326')
            BaseDataPublisher._publish_sql(df,
                                           'test_export_postgis',
                                           schema='niamoto')
            engine = Connector.get_engine()
            inspector = Inspector.from_engine(engine)
            self.assertIn(
                'test_export_postgis',
                inspector.get_table_names(schema=settings.NIAMOTO_SCHEMA),
            )
            df2 = gpd.read_postgis(sel,
                                   connection,
                                   index_col='id',
                                   geom_col='location',
                                   crs={'init': 'epsg:4326'})
            BaseDataPublisher._publish_sql(
                df2,
                'test_export_postgis',
                schema='niamoto',
                if_exists='truncate',
                truncate_cascade=True,
            )
            # Test geometry types
            polygon = Polygon([(0, 0), (1, 0), (1, 1)])
            linestring = LineString([(0, 0), (0, 1), (1, 1)])
            multipoint = MultiPoint([(1, 2), (3, 4), (5, 6)])
            multilinestring = MultiLineString([[(1, 2), (3, 4), (5, 6)],
                                               [(7, 8), (9, 10)]])
            polygon_2 = Polygon([(1, 1), (1, -1), (-1, -1), (-1, 1)],
                                [[(.5, .5), (.5, -.5), (-.5, -.5), (-.5, .5)]])
            multipolygon = MultiPolygon([polygon, polygon_2])
            geometry = GeometryCollection([polygon, polygon_2])
            BaseDataPublisher._publish_sql(
                gpd.GeoDataFrame([{
                    'A': 1,
                    'geom': polygon
                }], geometry='geom'),
                'test_export_postgis',
                schema='niamoto',
                if_exists='replace',
            )
            BaseDataPublisher._publish_sql(
                gpd.GeoDataFrame([{
                    'A': 1,
                    'geom': linestring
                }], geometry='geom'),
                'test_export_postgis',
                schema='niamoto',
                if_exists='replace',
            )
            BaseDataPublisher._publish_sql(
                gpd.GeoDataFrame([{
                    'A': 1,
                    'geom': multilinestring
                }], geometry='geom'),
                'test_export_postgis',
                schema='niamoto',
                if_exists='replace',
            )
            BaseDataPublisher._publish_sql(
                gpd.GeoDataFrame([{
                    'A': 1,
                    'geom': multipoint
                }], geometry='geom'),
                'test_export_postgis',
                schema='niamoto',
                if_exists='replace',
            )
            BaseDataPublisher._publish_sql(
                gpd.GeoDataFrame([{
                    'A': 1,
                    'geom': multipolygon
                }], geometry='geom'),
                'test_export_postgis',
                schema='niamoto',
                if_exists='replace',
            )
            BaseDataPublisher._publish_sql(
                gpd.GeoDataFrame([{
                    'A': 1,
                    'geom': geometry
                }], geometry='geom'),
                'test_export_postgis',
                schema='niamoto',
                if_exists='replace',
            )
            BaseDataPublisher._publish_sql(
                gpd.GeoDataFrame([{
                    'A': 1,
                    'geom': geometry
                }], geometry='geom'),
                'TEST123',
                schema='niamoto',
                if_exists='replace',
            )
            BaseDataPublisher._publish_sql(
                gpd.GeoSeries([polygon]),
                'test_export_postgis',
                schema='niamoto',
                if_exists='replace',
            )
            self.assertRaises(
                ValueError,
                BaseDataPublisher._publish_sql,
                gpd.GeoSeries([polygon]),
                'test_export_postgis',
                schema='niamoto',
                if_exists='thisisnotallowed',
            )
Example #5
                                pass

                    except ValueError:

                        rid = save_points[-1]
                        del line_coords[line_coords.index(tuple(loads(gm[identifier == rid].iloc[0]).coords)) + 1:]
                        del coords_count[(line_coords.index(tuple(loads(gm[identifier == rid].iloc[0]).coords)) + 1) * 2:]

                else:

                    for b in by_product:

                        try:

                            g = tuple(loads(gm[identifier == b].iloc[0]).coords)
                            line_coords.remove(g)

                        except ValueError:

                            pass

                    line_coords.append(tuple(loads(gm[identifier == row['od_id']].iloc[0]).coords))
                    fileN.loc[n, 'geom'] = str(MultiLineString(line_coords))
                    fileN.loc[n, 'line_id'] = n
                    n += 1
                    stop = row['od_id']

fileN.to_csv("Generated.csv", index=False)

print(time.time() - start_time, "time")
Example #6
def create_segments_from_json(roads_shp_path, mapfp):
    roads, inters = util.get_roads_and_inters(roads_shp_path)
    print("read in {} road segments".format(len(roads)))

    # unique id did not get included in shapefile, need to add it for adjacency
    for i, road in enumerate(roads):
        road.properties['orig_id'] = int(str(99) + str(i))

    # Initial buffer = 20 meters
    int_buffers = get_intersection_buffers(inters, 20)
    print("Found {} intersection buffers".format(len(int_buffers)))
    non_int_lines, inter_segments = find_non_ints(roads, int_buffers)

    non_int_w_ids = []

    # Allow intersections that don't have osmids, because this
    # happens when we generate alternate maps from city data
    # They won't have display names, and this is okay, because
    # we only use them to map to the osm segments
    inters_by_id = {
        x['properties']['osmid'] if 'osmid' in x['properties'] else '0':
        x['properties']['streets'] if 'streets' in x['properties'] else None
        for x in inters
    }

    for i, l in enumerate(non_int_lines):
        value = copy.deepcopy(l)
        value['type'] = 'Feature'
        value['properties']['id'] = '00' + str(i)
        value['properties']['inter'] = 0
        value['properties']['display_name'] = get_non_intersection_name(
            l, inters_by_id)
        non_int_w_ids.append(value)

        x, y = util.get_center_point(value)
        x, y = util.reproject([[x, y]],
                              inproj='epsg:3857',
                              outproj='epsg:4326')[0]['coordinates']
        value['properties']['center_y'] = y
        value['properties']['center_x'] = x

    print("extracted {} non-intersection segments".format(len(non_int_w_ids)))

    # Planarize intersection segments
    # Turns the list of LineStrings into a MultiLineString
    union_inter = []
    for idx, lines in list(inter_segments['lines'].items()):

        lines = unary_union(lines)
        coords = []
        # Fixing issue where we had previously thought a dead-end node
        # was an intersection. Once this is fixed in osmnx
        # (or we have a better work around), this should be able to
        # be taken out
        if isinstance(lines, LineString):
            lines = MultiLineString([lines.coords])
        for line in lines.geoms:
            coords += [[x for x in line.coords]]

        name = get_intersection_name(inter_segments['data'][idx])
        # Add the number of segments coming into this intersection
        segment_data = []
        for segment in list(inter_segments['data'][idx]):
            segment['intersection_segments'] = len(inter_segments['data'][idx])
            segment_data.append(segment)

        properties = {'id': idx, 'data': segment_data, 'display_name': name}
        value = geojson.Feature(
            geometry=geojson.MultiLineString(coords),
            id=idx,
            properties=properties,
        )
        x, y = util.get_center_point(value)
        x, y = util.reproject([[x, y]],
                              inproj='epsg:3857',
                              outproj='epsg:4326')[0]['coordinates']

        value['properties']['center_x'] = x
        value['properties']['center_y'] = y
        union_inter.append(value)

    return non_int_w_ids, union_inter
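
# A hypothetical invocation (the shapefile path is a placeholder for input
# readable by util.get_roads_and_inters): returns non-intersection features
# and planarized intersection features.
non_int_segments, intersection_segments = create_segments_from_json(
    'maps/osm_elements.shp', 'maps/')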
Example #7
    #x_append =
    my_list_x.append(x[i:i + 2])
    x_coordinates = np.hstack(my_list_x)
    #y_append =
    my_list_y.append(y[i:i + 2])
    y_coordinates = np.hstack(my_list_y)

print "X_coordinates", x_coordinates  #my_list_x
print "Y_coordinates", y_coordinates  #y_append
pair_list = np.vstack((x_coordinates, y_coordinates)).T
print "x,y_coordinates_points\n", pair_list

line_coordinates = []
p = 0
temp = []
for pair in pair_list:
    p += 1
    if p % 2 == 0:
        temp.append(pair)
        line_coordinates.append(temp)
        temp = []
    else:
        temp.append(pair)

print "line_coordinates\n", line_coordinates
mlines = MultiLineString(line_coordinates)
print "MultiLineString\n", mlines
for line_1, line_2 in combinations([line for line in mlines], 2):
    if line_1.intersects(line_2):
        print(line_1.intersection(line_2))
Example #8
    def extendFaultLines(self):
        """Extend Fault lines
            When extend fault lines to the boundary:
                1. More intersection points added
                2. boundary line split by these additional intersection point
                3. More fault lines added if extended fault line intersectes
        Arguments
        ---------
        FaultLines   -- [dict] Unique fault line data [Verts][LocID] 
        IntersectPts -- intersection points (end points) for fault lines

        Author:Bin Wang([email protected])
        Date: Sep. 2018
        """
        debug = 0
        OldLines = MultiLineString(self.BoundaryLines + self.FaultLines)
        FaultLine_Extend = self.FaultLines[:]
        BoundaryLine_Splitted = self.BoundaryLines[:]

        #Step 1. Extend Faults Lines
        ExtendLineIDs = []
        ExtendLines = []
        NewIntersectPts = []
        for i, Line in enumerate(self.FaultLines):
            #if(i>25):
            #    continue
            flag = 0
            StartPoint, EndPoint = Line[0], Line[-1]
            countSP = self.IntersectPts.count(StartPoint)
            countEP = self.IntersectPts.count(EndPoint)

            NewLine = Line[:]
            NewEndPoint = []
            if (debug): print('Before', NewLine, countSP, countEP)
            if (countSP == 1
                    and isBoundaryVert(self.GRDECL_Data, StartPoint) == False):
                #if(debug):print('SV ',StartPoint,'is a hanging vert')
                NewEndPoint = extend_FaultLines(self.GRDECL_Data, Line,
                                                FaultLine_Extend, 'StartPoint')
                NewLine = NewEndPoint + NewLine  #+NewLine[1:]
                NewIntersectPts.append(NewEndPoint[0])
                flag = 1
            if (countEP == 1
                    and isBoundaryVert(self.GRDECL_Data, EndPoint) == False):
                #if(debug): print('EV ',EndPoint,'is a hanging vert')
                NewEndPoint = extend_FaultLines(self.GRDECL_Data, Line,
                                                FaultLine_Extend, 'EndPoint')
                NewLine = NewLine + NewEndPoint  #NewLine[:-1]+NewEndPoint#
                NewIntersectPts.append(NewEndPoint[0])
                flag = 1
            if (flag == 1):
                if (debug): print('After', NewLine)
                ExtendLines.append(NewLine)
                ExtendLineIDs.append(i)
                FaultLine_Extend[i] = NewLine

        if (debug):
            print('Added EndPoint', sorted(NewIntersectPts))
            print('Extended Lines', ExtendLineIDs)

        if (len(ExtendLines) > 0):  #We have extendable lines
            #Step 2. Find the intersection points between newly extended lines
            NewLines = MultiLineString(ExtendLines)
            PossibleIntersectPts = []
            for i, line_i in enumerate(NewLines.geoms):
                for j, line_j in enumerate(NewLines.geoms):
                    if (j > i):
                        result = line_i.intersection(line_j)
                        if (result.geom_type in ['LineString', 'Point']):
                            #print('--------',result.geom_type)
                            result = list(result.coords)
                        else:
                            if (len(result) > 0):
                                #print('--------',result.geom_type)
                                if (result.geom_type == 'MultiPoint'):
                                    result = Shapely2List_MultiPoint(result)
                                else:
                                    print(
                                        "!!!!!!!!!!!!!!May have problem...Check extendFaultLines!!"
                                    )
                        if (len(result) > 0):
                            #print(result)
                            #print(i,j,line_i,line_j)
                            PossibleIntersectPts += result
            print('Added %d new intersection pts' %
                  (len(PossibleIntersectPts)))
            NewIntersectPts += PossibleIntersectPts

            #Step 3. Split the old line in terms of new intersection point
            if (len(NewIntersectPts) > 0):
                result = split(MultiLineString(FaultLine_Extend),
                               MultiPoint(NewIntersectPts))
                FaultLine_Extend = Shapely2List_MultiLineString(result)
                result = split(MultiLineString(self.BoundaryLines),
                               MultiPoint(NewIntersectPts))
                BoundaryLine_Splitted = Shapely2List_MultiLineString(result)

            #debug
            #self.plotLines(BoundaryLine_Splitted,FaultLine_Extend)

        return BoundaryLine_Splitted, FaultLine_Extend, self.IntersectPts + NewIntersectPts
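
# A minimal standalone sketch of the Step 3 split used above, assuming
# shapely's split(): cutting a LineString at a MultiPoint yields a
# GeometryCollection of the resulting pieces.
from shapely.geometry import LineString, MultiPoint
from shapely.ops import split

pieces = split(LineString([(0, 0), (2, 0)]), MultiPoint([(1, 0)]))
print(len(pieces.geoms))  # 2: the line is cut at x=1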
Example #9
def as_MultiLineString(shape):
    if isinstance(shape, LineString):
        return MultiLineString([shape])
    return shape
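
# Normalization sketch: both calls below yield a MultiLineString, so
# downstream code can iterate over .geoms without special-casing a
# bare LineString.
from shapely.geometry import LineString, MultiLineString

single = LineString([(0, 0), (1, 1)])
print(as_MultiLineString(single).geom_type)                     # MultiLineString
print(as_MultiLineString(MultiLineString([single])).geom_type)  # MultiLineString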
Example #10
def main(gdf_lines, gdf_poly):

	# file_name = 'petr_kamch'
	# read_shp = gpd.read_file(r'./shp/raw/{}.shp'.format(file_name), encoding = 'utf-8', errors='ignore')
	sleep(pause)


	# remove malformed characters in strings (those that failed unicode conversion)
	# city_graph = read_shp.copy()
	city_graph = gdf_lines.copy()
	sleep(pause)
	#################
	#list_columns = [city_graph['name'], city_graph['other_tags']]
	#np_city_gr = city_graph.to_numpy()

	def bytesDecode(list_columns):
		list_new_columns = []
		for column in (list_columns):
			list_strings = []
			for row in (column):
				new_row = row
				if isinstance(row, bytes):
					list_new_values = []
					j=0
					new_value = ""
					for j in range(len(row)):
						try:
							new_value = row[j:j+1].decode() 
							# decode one byte at a time
						except (UnicodeDecodeError, AttributeError):
							try:
								new_value = row[j:j+2].decode() 
								# Cyrillic letters take two bytes each
							except (UnicodeDecodeError, AttributeError):
								new_value = ""
						list_new_values.append(new_value)
					new_string = ""
					i=0
					for i in list_new_values:
						new_string = new_string+i
					#if (len(new_string.encode('utf-8')) >= 254): 
						# maximum string length in a shp field is 255
						#new_string = new_string[:254]
					new_row = new_string + '"' #
				list_strings.append(new_row)
		#
			list_new_columns.append(list_strings)
		return list_new_columns
		# 
	# 
	# ind_name = list(city_graph.columns).index('name')
	# ind_ot = list(city_graph.columns).index('other_tags')
	list_columns = [city_graph['name'], city_graph['other_tags']]
	#list_columns = list(np_city_gr[:,ind_name], np_city_gr[:,ind_ot])

	list_new_columns = bytesDecode(list_columns)
	sleep(pause)


	#################
	city_graph['name'] = list_new_columns[0]
	city_graph['other_tags'] = list_new_columns[1]


	# before filtering, keep the needed tram and railway edges
	rail_tram = city_graph[(city_graph['other_tags'].str.contains('"railway"=>"tram"', na=False))]
	rail_main = city_graph[(city_graph['other_tags'].str.contains('"railway"=>"rail"', na=False))]
	rail_main = rail_main[((rail_main['other_tags'].str.contains('"usage"=>"main"', na=False)) 
						   | (rail_main['other_tags'].str.contains('"usage"=>"branch"', na=False))
						   | ((~rail_main['other_tags'].str.contains('service"=>"', na=False)) 
							  & ((~rail_main['other_tags'].str.contains('usage"=>"', na=False)))))]
	#
	rail_subw = city_graph[(city_graph['other_tags'].str.contains('"railway"=>"subway"', na=False))]
	rail_subw = rail_subw[~(rail_subw['other_tags'].str.contains('service', na=False))]


	# drop rows containing unwanted values
	#  lists of unwanted values that will need to be removed
	lst_highway_notok = ['steps', 'pedestrian', 'footway', 'path', 'raceway', 'road', 'track', 'planned', 'proposed', 'cycleway']

	lst_ot_notok = ['access"=>"no','abandoned','admin_level','aeroway','attraction','building',
					'ferry','grass','hiking', 'ice_road','land','leaf_type',
					'leisure','mud','natural','piste',
					'planned','power','private','proposed','wood','wokrset']
	# some are ok: unpaved,description


	city_graph = city_graph[
		(city_graph.waterway.isna())
		& (city_graph.aerialway.isna())
		& (city_graph.man_made.isna())
		& ((city_graph.barrier.isna()) | (city_graph['barrier'] == 'yes'))
		& (~city_graph.highway.isin(lst_highway_notok))
		& (~city_graph['other_tags'].str.contains('|'.join(lst_ot_notok), na=False))
		& (~(city_graph['other_tags'].str.contains("sand", na=False) & city_graph.name.isna()))
		& (~((city_graph.z_order == 0) & (city_graph.name.isna())))
		& (~((city_graph.highway == 'construction') 
			  & (city_graph.other_tags.isna()) & (city_graph.name.isna())))
		& (~((city_graph.highway == 'service') & (city_graph.name.isna())))
								 ].reset_index(drop=True)
	#
	
	
	sleep(pause)
	########
	# https://automating-gis-processes.github.io/site/notebooks/L3/spatial_index.html

	def intersect_using_spatial_index(source_gdf, intersecting_gdf):
		"""
		Conduct a spatial intersection using the spatial index of the candidate GeoDataFrame to make queries faster.
		Note: 'intersecting_gdf' may contain multiple Polygons; all geometries
		that intersect ANY of those polygons are returned.
		"""
		source_sindex = source_gdf.sindex
		possible_matches_index = []

		# 'itertuples()' function is a faster version of 'iterrows()'
		for other in intersecting_gdf.itertuples():
			bounds = other.geometry.bounds
			c = list(source_sindex.intersection(bounds))
			possible_matches_index += c

		# Get unique candidates
		unique_candidate_matches = list(set(possible_matches_index))
		possible_matches = source_gdf.iloc[unique_candidate_matches]

		# Conduct the actual intersect
		result = possible_matches.loc[possible_matches.intersects(intersecting_gdf.unary_union)]
		return result
	########
	
	# keep only the edges that are inside the polygon
	try:
		gdf_lines_tmp = intersect_using_spatial_index(city_graph, gdf_poly[['geometry']])
		gdf_lines_tmp = gdf_lines_tmp.reset_index(drop=True)
		# gdf_lines_tmp = gpd.sjoin(gdf_lines, gdf_poly[['geometry']], how='inner', 
							  # op='intersects').drop("index_right", axis=1).reset_index(drop=True)
		#
		if len(gdf_lines_tmp) > (len(city_graph) / 2):
			city_graph = gdf_lines_tmp.copy()
		gdf_lines_tmp = None
		gdf_poly = None
		del gdf_lines_tmp, gdf_poly
	except:
		pass
	#
	
	#############################
	# constructions - temporary changes in the road 
	# no name and no tags - new road, should be deleted
	
	constr = city_graph[(((city_graph.highway == 'construction') 
							  & ~(city_graph.other_tags.isna()) 
							  & ~(city_graph.name.isna())) 
							 | ((city_graph.other_tags.str.contains("bridge", na=False)) 
								& (~(city_graph.name.isna()))
								& ((city_graph.highway.isna()))))].reset_index(drop=True)
	#
	
	#########
	#city_graph2 = city_graph.copy()
	cg_crs = city_graph.crs
	np_cg2 = city_graph.to_numpy()
	ind_hw = list(city_graph.columns).index('highway')
	ind_oi = list(city_graph.columns).index('osm_id')

	lst_contstr_name=list(constr.name.unique())
	i=0
	for i in (range(len(lst_contstr_name))):
		one_name = lst_contstr_name[i]
		df_small = constr[constr.name == one_name].reset_index(drop=True)
		sj_df = intersect_using_spatial_index(city_graph,df_small)
	#	 sj_df = gpd.sjoin(df_small, city_graph[['highway', 'name', 'geometry']], 
	#					   how='inner', op='intersects').drop("index_right", axis=1)
		lst_hw = list(sj_df[((sj_df.name == one_name) 
							 & (sj_df.highway != 'construction') 
							 & ((sj_df.highway.astype(str) != 'None')))].highway.unique())
		if lst_hw:
			new_hw = lst_hw[0]
		else:
			new_hw = constr.highway[i]
		for j in range(len(df_small)):
			ind_big = list(np_cg2[:,ind_oi]).index(df_small.osm_id[j])
			np_cg2[ind_big,ind_hw] = new_hw
	# 
	sleep(pause)
	
	lst_col = list(city_graph.columns)

	city_graph = gpd.GeoDataFrame(np_cg2, columns=lst_col)
	city_graph.crs = cg_crs
	#########
	#############################

	# remove unwanted, and add needed, railway and tram lines
	city_graph = city_graph[
		(~city_graph['other_tags'].str.contains('=>"rail"', na=False))
		& (~city_graph['other_tags'].str.contains('railway', na=False))
						 ]
	city_graph = city_graph.append(rail_tram)
	city_graph = city_graph.append(rail_main)
	city_graph = city_graph.append(rail_subw)

	#city_graph = city_graph[(~city_graph['other_tags'].str.contains('disused', na=False))]

	city_graph = city_graph.reset_index(drop=True)

	sleep(pause)

	# select lines which are used in routes of public transport
	try:
		buff_gdf_multilines = gdf_multilines.to_crs('epsg:32637').buffer(0.5).to_crs('epsg:4326')
		buff_gdf_multilines = gpd.GeoDataFrame(geometry=buff_gdf_multilines)
		inter_gdf_lines =  gpd.sjoin(gdf_lines, buff_gdf_multilines, how='inner', 
									 op='within').drop("index_right", axis=1).reset_index(drop=True)

		add_new_tmp = inter_gdf_lines[~inter_gdf_lines.osm_id.isin(list(city_graph.osm_id))] #add only new lines

		add_new = gdf_lines[gdf_lines.osm_id.isin(list(add_new_tmp.osm_id))]
		add_new = add_new[
			(add_new.waterway.isna())
			& (add_new.aerialway.isna())
			& (add_new.barrier.isna())
			& (add_new.man_made.isna()) 
			& ~add_new.highway.isin(lst_highway_notok) 
			& (~add_new['other_tags'].str.contains('|'.join(lst_ot_notok), 
												   na=False))].drop_duplicates()
	#
		city_graph = city_graph.append(add_new).reset_index(drop=True)
	except:
		pass
	#
	#####
	#! ATTENTION!!!
	gdf_lines = None
	del gdf_lines
	sleep(pause)
	#####
	########################
	# reverse the edge direction for oneway=-1
	reverse_oneway = city_graph[city_graph.other_tags.str.contains('oneway"=>"-1', 
																   na=False)].reset_index(drop=True)
	#
	####################
	np_city_gr = city_graph.to_numpy()
	lst_rev_on = list(reverse_oneway.osm_id)
	ind_geo = list(city_graph.columns).index('geometry')
	ind_oi = list(city_graph.columns).index('osm_id')

	def RevOnwGeo(np_city_gr,lst_rev_on,ind_geo,ind_oi):
		lst_geo_new=[]
		i=0
		for i in range(len(np_city_gr)):
			if np_city_gr[i][ind_oi] in lst_rev_on:
				list_geo = list(np_city_gr[i][ind_geo].coords[:])
				one_reversed_geo = list_geo[::-1]
				line_2 = LineString(one_reversed_geo)
				lst_geo_new.append(line_2)
			else:
				lst_geo_new.append(np_city_gr[i][ind_geo])
		#
		return lst_geo_new
	# 

	lst_geo_new = RevOnwGeo(np_city_gr,lst_rev_on,ind_geo,ind_oi)
	#################### 
	sleep(pause)

	try:
		city_graph['geometry'] = lst_geo_new
	except:
		print("Error_rev")
	# 
	########################
	# # Graph processing: splitting edges at intersections and creating nodes

	# here edges are split at every intersection (even on multi-level overpasses)
	# overpass handling comes later
	lines = list(city_graph.geometry)
	graph = unary_union(lines)
	res_graph = gpd.GeoDataFrame(graph) 
	# using geometry=[graph] would turn the whole graph into one big multilinestring
	res_graph = res_graph.rename(columns={0:'geometry'})
	res_graph.crs='epsg:4326'
	#res_graph = res_graph.to_crs('epsg:4326')
	res_graph = res_graph.reset_index(drop=True)
	res_graph = res_graph.reset_index()
	
	sleep(pause)

	# pull in attribute fields based on geometry intersection
	graph_info = gpd.sjoin(res_graph, city_graph, how='left', 
							   op='within').drop("index_right", axis=1).reset_index(drop=True)
	#
	del graph_info['index']
	#################################
	nans_g = graph_info[graph_info.osm_id.isna()]
	if len (nans_g) != 0:
		nans_g = gpd.sjoin(nans_g[['geometry']], city_graph, how='inner', 
								   op='intersects').drop("index_right", axis=1).reset_index(drop=True)
	#
	sleep(pause)

	# these need to be re-split
	tmp_city = city_graph[city_graph.osm_id.isin(nans_g.osm_id)].reset_index(drop=True)

	# drop empty rows and those that need re-splitting; the rest are fine
	good_graph_info = graph_info[((~graph_info.osm_id.isna()) 
								  & (~graph_info.osm_id.isin(tmp_city.osm_id)))].reset_index(drop=True)
	#
	##############
	np_tc = tmp_city.to_numpy()
	ind_geo = list(tmp_city.columns).index('geometry')
	ind_oi = list(tmp_city.columns).index('osm_id')
	
	sleep(pause)
	########


	lst_uu_geo = []
	newlst=[]
	i=0
	for i in (range(len(np_tc))):
		one_geo = gpd.GeoDataFrame(geometry=[np_tc[i,-1]])
		one_geo.crs = city_graph.crs
		tmp_sj = intersect_using_spatial_index(city_graph,one_geo)
	#	 tmp_sj = gpd.sjoin(one_geo, city_graph, how='inner', 
	#						op='intersects').drop("index_right", axis=1).reset_index(drop=True)
		sj_one = list(city_graph[city_graph.osm_id.isin(tmp_sj.osm_id)].geometry)
		lst_one_geo = np_tc[i][ind_geo].coords[:]
		uniqlines = []
		lst_sj_ends=[]
		j=0
		for j in range(len(sj_one)):
			lst_uu = []
			tmp_lst = []
			tmp_lst.append(sj_one[j])
			lst_sj_one_geo = sj_one[j].coords[:]
			res = list(set(lst_one_geo) & set(lst_sj_one_geo)) #find mutual points
			if len(res) > 0:
				for k in res:
					if ((k != np_tc[i][ind_geo].coords[0]) & (k != np_tc[i][ind_geo].coords[-1])):
						if sj_one[j] not in lst_uu:
							lst_uu.append(sj_one[j])
					if ((k == sj_one[j].coords[0]) | (k == sj_one[j].coords[-1])):
						if sj_one[j] not in lst_sj_ends:
							lst_sj_ends.append(sj_one[j])
			#
			for line in lst_uu:
				if not any(p.equals(line) for p in uniqlines):
					uniqlines.append(line)
		if np_tc[i][ind_geo] not in uniqlines:
			uniqlines.append(np_tc[i][ind_geo])
		if len(uniqlines) > 1:
			uu_geo = unary_union(uniqlines)
			one_gdf = gpd.GeoDataFrame(geometry=list(uu_geo))
			one_gdf.crs=city_graph.crs
			tmp_one_gdf = gpd.sjoin(one_gdf, city_graph, how='left', 
									op='within').drop("index_right", axis=1)
			if len(tmp_one_gdf[tmp_one_gdf.osm_id.isna()]) > 0:
				line_f = np_tc[i][ind_geo]
				line = np_tc[i][ind_geo]
				d=0
				cnt=0
				for d in range(len(lst_sj_ends)):
					try:
						point = Point(list(set(line_f.coords[:]) & set(lst_sj_ends[d].coords[:]))[0])
						if (((point.coords[0] != line_f.coords[0]) & (point.coords[0] != line_f.coords[-1])) 
							& ((point.coords[0] == lst_sj_ends[d].coords[0]) | (point.coords[0] == lst_sj_ends[d].coords[-1]))):
							new_geo = MultiLineString(list(shapely.ops.split(line,point)))
							line = new_geo
							cnt+=1
					except:
						pass
				if cnt > 0:
					newlst.append(new_geo)
				else:
					new_geo = np_tc[i][ind_geo]
			else:
				new_geo = MultiLineString(list(tmp_one_gdf[tmp_one_gdf.osm_id 
																		== np_tc[i][ind_oi]].geometry))
		else:
			new_geo = np_tc[i][ind_geo]
		#
		lst_uu_geo.append(new_geo)
	# 
	####
	# ! ATTENTION
	city_graph = None
	del city_graph
	####
	
	sleep(pause)
	##############
	#
	try:
		tmp_city['uu_geo'] = lst_uu_geo
	except:
		print("Error_uu")
	#
	###############
	np_tmp_ct = tmp_city.to_numpy()
	ind_uug = list(tmp_city.columns).index('uu_geo')
	
	sleep(pause)

	new_df = []
	i=0
	for i in (range(len(np_tmp_ct))):
		
		one_line = np_tmp_ct[i][ind_uug]
		try:
			len_ol = len(one_line)
			j = 0
			for j in range(len_ol):
				lst_one = list(np_tmp_ct[i][:ind_uug])
				lst_one.append(one_line[j])
				new_df.append(lst_one)
		except:
			new_df.append(list(np_tmp_ct[i]))
	# 
	
	sleep(pause)
	
	###############
	new_gdf = gpd.GeoDataFrame(columns=tmp_city.columns, data=new_df)
	del new_gdf['geometry']
	new_gdf = new_gdf.rename(columns={'uu_geo':'geometry'})
	new_gdf.crs='epsg:4326'

	tr_gi = good_graph_info.append(new_gdf).reset_index(drop=True)
	graph_filtrd = tr_gi.copy()
	graph_filtrd['z_order'] = graph_filtrd['z_order'].astype(np.int64)
	#################################
	
	sleep(pause)

	#################################################
	# create a digraph (bidirectional graph)

	###### Create Reverse of graph
	#direct_gdf = new_graph[['osm_id', 'name', 'highway', 'z_order', 'other_tags', 'geometry']].copy()

	direct_gdf = graph_filtrd[['osm_id', 'name', 'highway', 'z_order', 'other_tags', 'geometry']].copy()
	reverse_gdf = direct_gdf.copy()
	
	sleep(pause)

	###############
	np_rev_gdf = reverse_gdf.to_numpy()
	ind_geo = list(reverse_gdf.columns).index('geometry')
	reversed_geo_all = []
	i=0
	for i in (range(len(np_rev_gdf))):
		list_geo = list(np_rev_gdf[i][ind_geo].coords[:])
		one_reversed_geo = list_geo[::-1]
		line_2 = LineString(one_reversed_geo)
		reversed_geo_all.append(line_2)
	# 
	###############

	#rev_geo = gpd.GeoDataFrame(geometry=reversed_geo_all)
	try:
		reverse_gdf['rev_geo'] = reversed_geo_all
		reverse_gdf = reverse_gdf.rename(columns={'geometry':'old_geo', 'rev_geo':'geometry'})
		reverse_gdf = reverse_gdf[['osm_id', 'name', 'highway', 'z_order', 'other_tags', 'geometry']]
	except:
		print("Error!")
	#
	sleep(pause)


	direct_gdf['direction'] = "direct"
	reverse_gdf['direction'] = "reverse"
	all_gdf = direct_gdf.append(reverse_gdf).reset_index(drop=True)
	all_gdf.crs = 'epsg:4326'


	############################################

	# split loops and edges where a node_from/node_to pair has more than 2 edges

	tmp_grph = all_gdf.copy()
	#
	lst_petl = []
	lst_start = []
	lst_end = []
	lst_stend = []

	#############
	np_tmp_gr = tmp_grph.to_numpy()
	ind_geo = list(tmp_grph.columns).index('geometry')
	
	sleep(pause)

	i=0
	for i in (range(len(np_tmp_gr))):
		if np_tmp_gr[i][ind_geo].coords[0] == np_tmp_gr[i][ind_geo].coords[-1]:
			lst_petl.append(np_tmp_gr[i][ind_geo])
		else:
			lst_stend.append(str(np_tmp_gr[i][ind_geo].coords[0]) + "_" + str(np_tmp_gr[i][ind_geo].coords[-1]))
	# 
	#############
	sleep(pause)

	my_dict = {i:lst_stend.count(i) for i in lst_stend}

	newDict = {}
	for (key, value) in my_dict.items():
		if value > 1:
			newDict[key] = value
	#

	#
	lst_geo = []
	lst_len_geo = []
	###########
	# np_tmp_gr = tmp_grph.to_numpy()
	# ind_geo = list(tmp_grph.columns).index('geometry')
	i=0
	for i in range(len(np_tmp_gr)):
		str_st_end = str(np_tmp_gr[i][ind_geo].coords[0]) + "_" + str(np_tmp_gr[i][ind_geo].coords[-1])
		if str_st_end in (newDict.keys()):
			lst_geo.append(str_st_end)
			lst_len_geo.append(np_tmp_gr[i][ind_geo].length)
		else:
			lst_geo.append(None)
			lst_len_geo.append(None)
	#
	###########
	
	sleep(pause)

	tmp_grph['geo_grup'] = lst_geo
	tmp_grph['geo_len'] = lst_len_geo

	################
	np_tmp_gr = tmp_grph.to_numpy()
	ind_ggr = list(tmp_grph.columns).index('geo_grup')
	ind_glen = list(tmp_grph.columns).index('geo_len')

	dct_gr_len = {}
	i=0
	for i in range(len(np_tmp_gr)):
		one_group = np_tmp_gr[i][ind_ggr]
		if one_group not in dct_gr_len.keys():
			lst_gr_len = []
			dct_gr_len[one_group] = lst_gr_len
		dct_gr_len[one_group] = dct_gr_len[one_group] + [np_tmp_gr[i][ind_glen]]  
	# 
	################
	sleep(pause)

	del dct_gr_len[None]
	del tmp_grph['geo_len'], tmp_grph['geo_grup']

	################
	np_tmp_gr = tmp_grph.to_numpy()
	ind_geo = list(tmp_grph.columns).index('geometry')

	lst_max = []
	i=0
	for i in range(len(np_tmp_gr)):
		one_group = str(np_tmp_gr[i][ind_geo].coords[0]) + "_" + str(np_tmp_gr[i][ind_geo].coords[-1])
		if one_group in dct_gr_len.keys():
			if (max(dct_gr_len[one_group]) == np_tmp_gr[i][ind_geo].length):
				lst_max.append(1)
			else:
				lst_max.append(None)
		elif np_tmp_gr[i][ind_geo].coords[0] == np_tmp_gr[i][ind_geo].coords[-1]:
			lst_max.append(1)
		else:
			lst_max.append(None)
	# 
	################

	try:
		tmp_grph['cut_geo'] = lst_max
	except:
		print("Error_cut_double")
	#
	#########################
	
	sleep(pause)
	
	# edge-cutting function
	def cut(line, distance):
		# Cuts a line in two at a distance from its starting point
		if distance <= 0.0 or distance >= line.length:
			return [LineString(line)]
		coords = list(line.coords)
		for i, p in enumerate(coords):
			pd = line.project(Point(p))
			if pd == distance:
				return [
					LineString(coords[:i+1]),
					LineString(coords[i:])]
			if pd > distance:
				cp = line.interpolate(distance)
				return [
					LineString(coords[:i] + [(cp.x, cp.y)]),
					LineString([(cp.x, cp.y)] + coords[i:])]
	# 
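	# For example (a sketch, not executed here):
	#   cut(LineString([(0, 0), (4, 0)]), 2.0)
	#   -> [LineString([(0, 0), (2, 0)]), LineString([(2, 0), (4, 0)])]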
	#########################
	all_ok = tmp_grph[tmp_grph.cut_geo != 1].reset_index(drop=True)
	cut_gdf = tmp_grph[tmp_grph.cut_geo == 1].reset_index(drop=True)

	################
	np_ctgdf = cut_gdf.to_numpy()
	ind_geo = list(cut_gdf.columns).index('geometry')

	big_lst = []
	i=0
	for i in (range(len(np_ctgdf))):
		line = np_ctgdf[i][ind_geo]
		lst_one_geo = cut(line, (line.length / 2))
		one_list = list(np_ctgdf[i,:ind_geo]) + [lst_one_geo[0]] + list(np_ctgdf[i,ind_geo+1:])
		two_list = list(np_ctgdf[i,:ind_geo]) + [lst_one_geo[1]] + list(np_ctgdf[i,ind_geo+1:])
		big_lst.append(one_list)
		big_lst.append(two_list)
	# 
	sleep(pause)

	big_gdf = gpd.GeoDataFrame(big_lst, columns=list(cut_gdf.columns))
	################ 
	big_gdf = all_ok.append(big_gdf).reset_index(drop=True)

	del big_gdf['cut_geo']

	big_gdf.crs = 'epsg:4326'
	big_gdf = big_gdf.to_crs('epsg:32637')
	#########################
	
	sleep(pause)
	
	############################################

	##############

	def make_graph_great_again(gdf):
		G = momepy.gdf_to_nx(gdf, approach='primal')
		
		# pick the largest of the subgraphs
		# (edge removals above leave dangling graph fragments that must be deleted)
		 # whatever graph you're working with
		cur_graph = G
	#	 def del_subgraphs(cur_graph):
		list_subgraphs = [cur_graph]
		if not nx.is_connected(cur_graph):
			# get a list of unconnected networks
			def connected_component_subgraphs(cur_graph):
				for c in nx.connected_components(cur_graph):
					yield cur_graph.subgraph(c)
			sub_graphs = connected_component_subgraphs(cur_graph)
			list_graph = []
			i=0
			for i in sub_graphs:
				list_graph.append(i)

			main_graph = list_graph[0]
			list_subgraphs = []
			#list_subgraphs.append(main_graph)

			# find the largest network in that list
			for sg in list_graph:
				if len(sg.nodes()) > len(main_graph.nodes()):
					main_graph = sg
				else:
					list_subgraphs.append(sg)
			try:
				list_subgraphs.remove(main_graph)
			except:
				pass
			cur_graph = main_graph
			#
		#####
		
		#create gdfs
		# формирование таблиц из графа и узлов (nodes)
		nodes, new_graph = momepy.nx_to_gdf(cur_graph)

		return nodes, new_graph

	all_nodes, all_edges = make_graph_great_again(big_gdf)
	
	sleep(pause)

	#################################################

	# all_graph = momepy.gdf_to_nx(big_gdf)
	# all_nodes, all_edges = momepy.nx_to_gdf(all_graph)

	all_edges.crs = 'epsg:32637'
	all_edges = all_edges.to_crs('epsg:4326')
	all_nodes.crs = 'epsg:32637'
	all_nodes = all_nodes.to_crs('epsg:4326')



	########### Check node_start and node_end - fix if swapped ###########

	check_points = all_edges.copy()

	check_points = check_points.rename(columns={'geometry': 'line_geometry'})

	check_points['nodeID'] = check_points['node_start']
	check_points = check_points.merge(all_nodes, how='left', on=['nodeID'])
	check_points = check_points.rename(columns={'geometry': 'start_geometry'})

	check_points['nodeID'] = check_points['node_end']
	check_points = check_points.merge(all_nodes, how='left', on=['nodeID'])
	check_points = check_points.rename(columns={'geometry': 'end_geometry'})

	del check_points['nodeID']

	##################
	np_cp = check_points.to_numpy()
	ind_ne = list(check_points.columns).index('node_end')
	ind_ns = list(check_points.columns).index('node_start')
	ind_sg = list(check_points.columns).index('start_geometry')
	ind_eg = list(check_points.columns).index('end_geometry')
	ind_lg = list(check_points.columns).index('line_geometry')

	# check_points['start_true'] = None
	# check_points['end_true'] = None
	
	sleep(pause)

	list_check_start = []
	list_check_end = []
	for i in range(len(np_cp)):
		if np_cp[i][ind_sg].coords[0] == np_cp[i][ind_lg].coords[0]:
			list_check_start.append(np_cp[i][ind_ns])
		elif np_cp[i][ind_eg].coords[0] == np_cp[i][ind_lg].coords[0]:
			list_check_start.append(np_cp[i][ind_ne])
		else:
			list_check_start.append(None)
	# 

	for ii in range(len(np_cp)):
		if np_cp[ii][ind_eg].coords[0] == np_cp[ii][ind_lg].coords[-1]:
			list_check_end.append(np_cp[ii][ind_ne])
		elif np_cp[ii][ind_sg].coords[0] == np_cp[ii][ind_lg].coords[-1]:
			list_check_end.append(np_cp[ii][ind_ns])
		else:
			list_check_end.append(None)
	#
	##################

	try:
		check_points['start_true'] = list_check_start
		check_points['end_true'] = list_check_end
	except Exception:
		print("Error1: could not assign corrected start/end node columns")
	# 
	
	sleep(pause)

	########### delete oneways reverse #############

	ok_oneway = check_points.copy()

	ok_oneway = ok_oneway.reset_index(drop=True)
	ok_oneway = ok_oneway.reset_index()
	ok_oneway = ok_oneway.rename(columns={'index':'link_id'})
	ok_oneway['link_id'] = ok_oneway['link_id'] + 1

	ok_oneway = ok_oneway[['link_id', 'osm_id', 'name', 'highway', 'z_order', 'other_tags',
						   'line_geometry', 'direction', 'mm_len', 'start_true', 'end_true']]
	ok_oneway = ok_oneway.rename(columns={'line_geometry':'geometry', 
										  'start_true':'node_start', 
										  'end_true':'node_end'})
	graph_full = ok_oneway.copy()
	graph_full = gpd.GeoDataFrame(graph_full)
	graph_full.crs = 'epsg:4326'

	graph_full['z_order'] = graph_full['z_order'].astype(np.int64)
	graph_full['mm_len'] = round((graph_full['mm_len'] / 1000), 3)
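	# mm_len comes from momepy in CRS units (metres in epsg:32637);
	# divide by 1000 to store kilometres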
	
	sleep(pause)

	############
	# create num_lanes column
	np_gf = graph_full.to_numpy()
	ind_ot = list(graph_full.columns).index('other_tags')
	ind_dir = list(graph_full.columns).index('direction')

	list_lanes = []
	reg = re.compile('[^0-9]')
	
	sleep(pause)

	lst_onw = ['oneway"=>"yes', 'oneway"=>"1', 'oneway"=>"true', 'oneway"=>"-1']
	for i in range(len(np_gf)):
		str1 = str(np_gf[i][ind_ot])
		if any((c in str1) for c in lst_onw):
			if np_gf[i][ind_dir] == 'direct':
				if '"lanes"=>"' in str1:
					str2 = str1[str1.find('"lanes"=>"') : ].split(",", 1)[0]
					int_lanes = int(reg.sub('', str2))
					list_lanes.append(int_lanes)
				else:
					list_lanes.append(1)
			else:
				list_lanes.append(0)
		else:
			if '"lanes"=>"' in str1:
				str2 = str1[str1.find('"lanes"=>"') : ].split(",", 1)[0]
				int_lanes = int(reg.sub('', str2))
				if int_lanes > 1:
					if np_gf[i][ind_dir] == 'direct':
						list_lanes.append(math.ceil(int_lanes/2))
					else:
						list_lanes.append(math.floor(int_lanes/2))
				else:
					list_lanes.append(1)
			else:
				list_lanes.append(1)
	# 
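	# Worked example of the rules above: a two-way link tagged '"lanes"=>"5"'
	# gets ceil(5/2) = 3 lanes on its 'direct' record and floor(5/2) = 2 on
	# the reverse one; a oneway link keeps its full lane count on the 'direct'
	# record and gets 0 on the reverse record.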
	
	sleep(pause)
	############

	try:
		graph_full['NUMLANES'] = list_lanes
	except Exception:
		print("Error2: could not assign NUMLANES column")
	#  

	#############
	np_gf = graph_full.to_numpy()
	ind_ot = list(graph_full.columns).index('other_tags')
	lst_types = []
	
	sleep(pause)
	
	for i in range(len(np_gf)):
		if "railway" in str(np_gf[i][ind_ot]):
			if '=>"tram"' in str(np_gf[i][ind_ot]):
				if 'surface' in str(np_gf[i][ind_ot]):
					lst_types.append("TM,CAR,BUS,TB,MT")
				else:
					lst_types.append("TM")
			elif 'subway' in str(np_gf[i][ind_ot]):
				lst_types.append("MTR")
			else:
				lst_types.append("E")
		else:
			if (('psv"=>"only"' in str(np_gf[i][ind_ot]))
					or (('psv"=>"yes"' in str(np_gf[i][ind_ot]))
						and ('vehicle"=>"no"' in str(np_gf[i][ind_ot])))):
				lst_types.append("BUS,TB,MT")
			else:
				lst_types.append("CAR,BUS,TB,MT")
	#
	
	sleep(pause)
	#############

	try:
		graph_full['TSYSSET'] = lst_types
	except Exception:
		print("Error3: could not assign TSYSSET column")
	# 

	##############################
	# add link type
	################
	np_gf = graph_full.to_numpy()
	ind_ot = list(graph_full.columns).index('other_tags')
	ind_hw = list(graph_full.columns).index('highway')
	ind_nm = list(graph_full.columns).index('name')
	ind_nl = list(graph_full.columns).index('NUMLANES')
	
	sleep(pause)

	lst_typeno = []
	for i in range(len(np_gf)):
		if "railway" in str(np_gf[i][ind_ot]):
			if "subway" in str(np_gf[i][ind_ot]):
				lst_typeno.append(10)
			elif "tram" in str(np_gf[i][ind_ot]):
				lst_typeno.append(40)
			else:
				lst_typeno.append(20)
		elif np_gf[i][ind_nl] == 0:
			lst_typeno.append(0)
		else:
			if np_gf[i][ind_hw] in ['motorway','trunk']:
				lst_typeno.append(1)
			elif np_gf[i][ind_hw] == 'primary':
				lst_typeno.append(2)
			elif np_gf[i][ind_hw] == 'secondary':
				lst_typeno.append(3)
			elif np_gf[i][ind_hw] == 'tertiary':
				lst_typeno.append(4)
			elif np_gf[i][ind_hw] in ['motorway_link','trunk_link', 'primary_link', 'secondary_link', 'tertiary_link']:
				lst_typeno.append(5)
			elif np_gf[i][ind_nm] is not None:
				lst_typeno.append(6)
			else:
				lst_typeno.append(7)
	# 
	
	sleep(pause)
	################

	try:
		graph_full['TYPENO_2'] = lst_typeno
	except Exception:
		print("Error_typeno: could not assign TYPENO_2 column")

	##############################

	graph_full.crs='epsg:4326'
	graph_full = graph_full.rename(columns={'link_id':'NO', 'mm_len':'LENGTH', 
											'node_start':'FROMNODENO', 'node_end':'TONODENO'})
	#
	return graph_full, all_nodes
Ejemplo n.º 11
0
            routes = skeleton_routes(skeletons, min_length)

        except _SignalAlarm:
            # An alarm signal here means that graph_routes() went overtime.
            raise _GraphRoutesOvertime(skeletons)

        points += sum(map(len, routes))
        lines.extend([simplify_line_vw(route, min_area) for route in routes])

    logging.debug('selected %d final points from %d graph route points' %
                  (sum(map(len, lines)), points))

    if not lines:
        return False

    return MultiLineString(lines)


def graph_routes(graph, find_longest, time_coefficient=0.02):
    """ Return a list of routes through a network as (x, y) pair lists, with no edge repeated.
    
        Use a thread timer to check for time overruns; see _graph_routes_main()
        for in-thread logic.
    """
    #
    # Before we do anything else, set a time limit to deal with the occasional
    # halting problem on larger graphs. Use a threading Timer to check time.
    #
    time_limit = 3 + int(ceil(time_coefficient * graph.number_of_nodes()))
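    # e.g. with the default time_coefficient of 0.02, a 1,000-node graph gets
    # a limit of 3 + ceil(0.02 * 1000) = 23 seconds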

    try:
Ejemplo n.º 12
0
def route_multi(filename,
                monosat_args,
                maxflow_enforcement_level,
                flowgraph_separation_enforcement_style=0,
                graph_separation_enforcement_style=1,
                heuristicEdgeWeights=False):
    (width,
     height), diagonals, nets, constraints, disabled = pcrt.read(filename)
    print(filename)
    print("Width = %d, Height = %d, %d nets, %d constraints" %
          (width, height, len(nets), len(constraints)))
    if diagonals:
        print(
            "45-degree routing enabled. Warning: 45-degree routing is untested, and may be buggy."
        )
    else:
        print("90-degree routing")

    if (len(monosat_args) > 0):
        args = " ".join(monosat_args)
        print("Monosat args: " + args)
        Monosat().init(args)

    graphs = []
    all_graphs = []
    for _ in nets:
        # for each net to be routed, create a separate symbolic graph;
        # later we will add constraints to force each edge to be enabled in at
        # most one of these graphs
        graphs.append(Graph())

        if heuristicEdgeWeights:
            # this enables a heuristic on these graphs, from the RUC paper,
            # which sets assigned edges to zero weight, to encourage edge-reuse
            # in solutions
            graphs[-1].assignWeightsTo(1)

        all_graphs.append(graphs[-1])

    flow_graph = None
    flow_graph_edges = dict()
    flow_graph_edge_list = collections.defaultdict(list)
    if maxflow_enforcement_level >= 1:
        # if flow constraints are used, create a separate graph which will
        # contain the union of all the edges enabled in the above graphs
        flow_graph = Graph()
        all_graphs.append(flow_graph)

    print("Building grid")
    out_grid = dict()
    in_grid = dict()
    vertex_grid = dict()
    vertices = dict()
    fromGrid = dict()
    for g in all_graphs:
        out_grid[g] = dict()
        in_grid[g] = dict()
        vertex_grid[g] = dict()

    for x in range(width):
        for y in range(height):
            vertices[(x, y)] = []
            for g in all_graphs:
                out_node = g.addNode("%d_%d" % (x, y))
                out_grid[g][(x, y)] = out_node
                fromGrid[out_node] = (x, y)

                in_node = g.addNode("in_%d_%d" % (x, y))
                in_grid[g][(x, y)] = in_node
                fromGrid[in_node] = (x, y)

                if (g != flow_graph):
                    # a large-enough weight, only relevant if
                    # heuristicEdgeWeights > 0
                    weight = 1 if not heuristicEdgeWeights else 1000
                    edge = g.addEdge(in_node, out_node, weight)
                else:
                    # add an edge with constant capacity of 1
                    edge = g.addEdge(in_node, out_node, 1)
                vertex_grid[g][(x, y)] = edge

                if (g != flow_graph):
                    vertices[(x, y)].append(edge)

    print("Adding edges")
    disabled_nodes = set(disabled)
    undirected_edges = dict()
    start_nodes = set()
    end_nodes = set()
    net_nodes = set()
    for net in nets:
        start_nodes.add(net[0])
        # you can pick any of the terminals in the net to be the starting node
        # for the routing constraints; it might be a good idea to randomize this
        # choice
        for n in net[1:]:
            end_nodes.add(n)
        for (x, y) in net:
            net_nodes.add((x, y))

    # create undirected edges between neighbouring nodes
    def addEdge(n, r, diagonal_edge=False):
        e = None
        if n not in disabled_nodes and r not in disabled_nodes:
            if n in net_nodes or r in net_nodes:
                allow_out = True
                allow_in = True
                if n in start_nodes or r in end_nodes:
                    allow_in = False
                if n in end_nodes or r in start_nodes:
                    allow_out = False
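                # at least one of n, r is a terminal here, so one of the two
                # checks above always fired: allow_in and allow_out can never
                # both be True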
                assert (not (allow_in and allow_out))
                if allow_out:
                    # for each net's symbolic graph (g), create an edge
                    edges = []
                    for g in graphs:
                        # add a _directed_ edge from n to r
                        eg = (g.addEdge(out_grid[g][n], in_grid[g][r]))
                        if e is None:
                            e = eg
                        undirected_edges[eg] = e
                        edges.append(eg)
                        if not diagonal_edge:
                            Assert(eg)

                    if flow_graph is not None:
                        # create the same edge in the flow graph
                        # add a _directed_ edge from n to r
                        ef = (flow_graph.addEdge(out_grid[flow_graph][n],
                                                 in_grid[flow_graph][r]))

                        if flowgraph_separation_enforcement_style > 0:
                            flow_graph_edges[(n, r)] = ef
                            flow_graph_edges[(r, n)] = ef
                            flow_graph_edge_list[n].append(ef)
                            flow_graph_edge_list[r].append(ef)
                        else:
                            if not diagonal_edge:
                                Assert(ef)
                        edges.append(ef)
                    if (diagonal_edge):
                        AssertEq(*edges)
                elif allow_in:
                    # for each net's symbolic graph (g), create an edge
                    edges = []
                    for g in graphs:
                        # add a _directed_ edge from n to r
                        eg = (g.addEdge(out_grid[g][r], in_grid[g][n]))
                        if e is None:
                            e = eg
                        undirected_edges[eg] = e
                        edges.append(eg)
                        if not diagonal_edge:
                            Assert(eg)

                    if flow_graph is not None:
                        # create the same edge in the flow graph
                        # add a _directed_ edge from n to r
                        ef = (flow_graph.addEdge(out_grid[flow_graph][r],
                                                 in_grid[flow_graph][n]))

                        if flowgraph_separation_enforcement_style > 0:
                            flow_graph_edges[(n, r)] = ef
                            flow_graph_edges[(r, n)] = ef
                            flow_graph_edge_list[n].append(ef)
                            flow_graph_edge_list[r].append(ef)
                        else:
                            if not diagonal_edge:
                                Assert(ef)
                        edges.append(ef)
                    if (diagonal_edge):
                        AssertEq(*edges)

                else:
                    e = None
            else:
                edges = []
                # for each net's symbolic graph (g), create an edge in both
                # directions
                for g in graphs:
                    # add a _directed_ edge from n to r
                    eg = (g.addEdge(out_grid[g][n], in_grid[g][r]))
                    if e is None:
                        e = eg
                    undirected_edges[eg] = e
                    if not diagonal_edge:
                        Assert(eg)
                    eg2 = (g.addEdge(out_grid[g][r], in_grid[g][n]))
                    if not diagonal_edge:
                        Assert(eg2)
                    else:
                        AssertEq(eg, eg2)
                    undirected_edges[eg2] = e  # map e2 to e
                    edges.append(eg)
                if flow_graph is not None:
                    # add a _directed_ edge from n to r
                    ef = (flow_graph.addEdge(out_grid[flow_graph][r],
                                             in_grid[flow_graph][n]))
                    # add a _directed_ edge from r to n
                    ef2 = (flow_graph.addEdge(out_grid[flow_graph][n],
                                              in_grid[flow_graph][r]))
                    edges.append(ef)
                    if flowgraph_separation_enforcement_style > 0:
                        AssertEq(ef, ef2)
                        flow_graph_edge_list[n].append(ef)
                        flow_graph_edge_list[r].append(ef)
                        flow_graph_edges[(n, r)] = ef
                        flow_graph_edges[(r, n)] = ef
                    else:
                        if not diagonal_edge:
                            Assert(ef)
                            Assert(ef2)
                    if (diagonal_edge):
                        AssertEq(*edges)

        return e

    # create all the symbolic edges.
    for x in range(width):
        for y in range(height):
            n = (x, y)
            if n in disabled_nodes:
                continue
            if x < width - 1:
                r = (x + 1, y)
                e = addEdge(n, r)

            if y < height - 1:
                r = (x, y + 1)
                e = addEdge(n, r)

            if diagonals:
                # if 45 degree routing is enabled, create diagonal edges here
                diag_up = None
                diag_down = None
                if x < width - 1 and y < height - 1:
                    r = (x + 1, y + 1)
                    e = addEdge(n, r, True)
                    diag_down = e

                # note: the trailing 'and False' makes this branch dead code,
                # so up-diagonal edges are never created
                if x > 0 and y < height - 1 and False:
                    r = (x - 1, y + 1)
                    e = addEdge(n, r, True)
                    diag_up = e

                if diag_up and diag_down:
                    # cannot route both diagonals
                    AssertNand(diag_up, diag_down)

    vertex_used = None

    # enforce constraints from the .pcrt file
    if len(constraints) > 0:
        print("Enforcing constraints")
        vertex_used = dict()
        for x in range(width):
            for y in range(height):
                # A vertex is used exactly if one of its edges is enabled
                vertex_used[(x, y)] = Or(vertices[(x, y)])

        for constraint in constraints:
            # a constraint is a list of vertices of which at most one can be used
            vertex_used_list = []
            for node in constraint:
                vertex_used_list.append(vertex_used[node])
            AMO(vertex_used_list)

    uses_bv = (flow_graph and flowgraph_separation_enforcement_style >= 2) or \
              (graph_separation_enforcement_style >= 2)

    print("Enforcing separation")
    #force each vertex to be in at most one graph.
    for x in range(width):
        for y in range(height):
            n = (x, y)
            if n not in net_nodes:
                if graph_separation_enforcement_style <= 1:
                    # use an at-most-one constraint to force each vertex to be
                    # assigned to at most one net
                    AMO(vertices[n])
                else:
                    # rely on the uniqueness bv encoding below to force at most
                    # one graph assign per vertex
                    assert (uses_bv)

                if flow_graph is not None or uses_bv:

                    if vertex_used is None:
                        # only create this lazily, if needed; use fresh loop
                        # variables so the enclosing x, y are not clobbered
                        vertex_used = dict()
                        for xx in range(width):
                            for yy in range(height):
                                # A vertex is used exactly if one of its edges
                                # is enabled
                                vertex_used[(xx, yy)] = Or(vertices[(xx, yy)])
                    if flow_graph is not None:
                        # Assert that iff this vertex is in _any_ graph, it
                        # must be in the flow graph
                        AssertEq(vertex_used[n], vertex_grid[flow_graph][n])

    if uses_bv:
        # Optionally, use a bitvector encoding to determine which graph each
        # edge belongs to (following the RUC paper).
        vertices_bv = dict()
        bvwidth = math.ceil(math.log(len(nets) + 1, 2))
        print("Building BV (width = %d)" % (bvwidth))

        # bv 0 means unused
        for x in range(width):
            for y in range(height):
                # netbv = BitVector(bvwidth)
                netbv = [Var() for _ in range(bvwidth)]
                # this is just for error checking this script
                seen_bit = [False] * bvwidth
                for b in range(bvwidth):
                    # if the vertex is not used, set the bv to 0
                    AssertImplies(~vertex_used[(x, y)], ~netbv[b])

                for i in range(len(nets)):
                    net_n = i + 1
                    seen_any_bits = False
                    for b in range(bvwidth):
                        bit = net_n & (1 << b)
                        if (bit):
                            AssertImplies(vertices[(x, y)][i], netbv[b])
                            seen_bit[b] = True
                            seen_any_bits = True
                        else:
                            AssertImplies(vertices[(x, y)][i], ~netbv[b])
                    # AssertImplies(vertices[(x,y)][i],(netbv.eq(net_n)))
                    assert (seen_any_bits)
                if graph_separation_enforcement_style < 3:
                    # rely on the above constraint
                    # AssertImplies(~vertex_used[(x, y)], ~netbv[b])
                    # to ensure that illegal values of netbv are disallowed
                    pass
                elif graph_separation_enforcement_style == 3:
                    # directly rule out disallowed bit patterns
                    # len(nets)+1, because 1 is added to each net id for the
                    # above calculation (so that 0 can be reserved for no net)
                    for i in range(len(nets) + 1, (1 << bvwidth)):
                        bits = []
                        for b in range(bvwidth):
                            bit = i & (1 << b)
                            if bit > 0:
                                bits.append(netbv[b])
                            else:
                                bits.append(~netbv[b])
                        # at least one of these bits cannot be assigned this way
                        AssertNand(bits)

                # all bits must have been set to 1 at some point, else the above
                # constraints are buggy
                assert (all(seen_bit))
                vertices_bv[(x, y)] = netbv

    # the following constraints are only relevant if maximum flow constraints
    # are being used. These constraints ensure that in the maximum flow graph,
    # edges are not connected between different nets.
    if flow_graph and flowgraph_separation_enforcement_style == 1:
        print("Enforcing (redundant) flow separation")
        # if two neighbouring nodes belong to different graphs, then
        # disable the edge between them.
        for x in range(width):
            for y in range(height):
                n = (x, y)

                if x < width - 1:
                    r = (x + 1, y)
                    if (n, r) in flow_graph_edges:
                        # if either end point is disabled, disable this
                        # edge... this is not technically required (since flow
                        # already cannot pass through unused vertices), but
                        # cheap to enforce and slightly reduces the search
                        # space.
                        AssertImplies(
                            Or(Not(vertex_used[n]), Not(vertex_used[r])),
                            Not(flow_graph_edges[(n, r)]))
                        any_same = false()
                        for i in range(len(vertices[n])):
                            # Enable this edge if both vertices belong to the
                            # same graph
                            same_graph = And(vertices[n][i], vertices[r][i])
                            AssertImplies(same_graph, flow_graph_edges[(n, r)])
                            any_same = Or(any_same, same_graph)
                            # Assert that if vertices[n] != vertices[r], then
                            # flow_graph_edges[(n, r)] = false
                            for j in range(i + 1, len(vertices[r])):
                                # if the end points of this edge belong to
                                # different graphs, disable them.
                                AssertImplies(
                                    And(vertices[n][i], vertices[r][j]),
                                    Not(flow_graph_edges[(n, r)]))
                        AssertEq(flow_graph_edges[(n, r)], any_same)

                if y < height - 1:
                    r = (x, y + 1)
                    if (n, r) in flow_graph_edges:
                        # if either end point is disabled, disable this
                        # edge... this is not technically required (since flow
                        # already cannot pass through unused vertices), but
                        # cheap to enforce and slightly reduces the search space.
                        AssertImplies(
                            Or(Not(vertex_used[n]), Not(vertex_used[r])),
                            Not(flow_graph_edges[(n, r)]))
                        any_same = false()
                        for i in range(len(vertices[n])):
                            # Enable this edge if both vertices belong to the
                            # same graph
                            same_graph = And(vertices[n][i], vertices[r][i])
                            AssertImplies(same_graph, flow_graph_edges[(n, r)])
                            any_same = Or(any_same, same_graph)

                            # Assert that if vertices[n] != vertices[r], then
                            # flow_graph_edges[(n,r)] = false
                            for j in range(i + 1, len(vertices[r])):
                                # if the end points of this edge belong to
                                # different graphs, disable them.
                                AssertImplies(
                                    And(vertices[n][i], vertices[r][j]),
                                    Not(flow_graph_edges[(n, r)]))
                        AssertEq(flow_graph_edges[(n, r)], any_same)

    elif flow_graph and flowgraph_separation_enforcement_style == 2:
        print("Enforcing (redundant) BV flow separation")
        for x in range(width):
            for y in range(height):
                n = (x, y)

                if x < width - 1:
                    r = (x + 1, y)
                    if (n, r) in flow_graph_edges:
                        # if either end point is disabled, disable this
                        # edge... this is not technically required (since flow
                        # already cannot pass through unused vertices), but
                        # cheap to enforce and slightly reduces the search space.
                        # AssertImplies(Or(Not(vertex_used[n]),Not(vertex_used[r])),
                        # Not(flow_graph_edges[(n, r)]))

                        # And(vertices[n][i], vertices[r][i])
                        same_graph = BVEQ(vertices_bv[n], vertices_bv[r])
                        AssertEq(And(vertex_used[n], same_graph),
                                 flow_graph_edges[(n, r)])

                if y < height - 1:
                    r = (x, y + 1)
                    if (n, r) in flow_graph_edges:
                        # if either end point is disabled, disable this
                        # edge... this is not technically required (since flow
                        # already cannot pass through unused vertices), but
                        # cheap to enforce and slightly reduces the search
                        # space.
                        # AssertImplies(Or(Not(vertex_used[n]), Not(vertex_used[r])),
                        # Not(flow_graph_edges[(n, r)]))

                        # And(vertices[n][i], vertices[r][i])
                        same_graph = BVEQ(vertices_bv[n], vertices_bv[r])
                        AssertEq(And(vertex_used[n], same_graph),
                                 flow_graph_edges[(n, r)])

    for i, net in enumerate(nets):
        for n in net:
            # terminal nodes must be assigned to this graph
            Assert(vertices[n][i])
            if (flow_graph):
                # force the terminal nodes to be enabled in the flow graph
                Assert(vertex_grid[flow_graph][n])

    print("Enforcing reachability")
    reachset = dict()
    # In each net's corresponding symbolic graph, enforce that the first
    # terminal of the net reaches each other terminal of the net.
    for i, net in enumerate(nets):
        reachset[i] = dict()
        n1 = net[0]
        # It is a good idea for all reachability constraints to have a common
        # source as that allows monosat to compute their paths simultaneously,
        # cheaply. Any of the terminals could be chosen to be that source; we
        # use net[0], arbitrarily.
        for n2 in net[1:]:
            g = graphs[i]
            r = g.reaches(in_grid[g][n1], out_grid[g][n2])
            reachset[i][n2] = r
            Assert(r)

            # decide reachability before considering regular variable decisions.
            r.setDecisionPriority(1)
            # That prioritization is required for the RUC heuristics to take
            # effect.

    if maxflow_enforcement_level >= 1:
        print("Enforcing flow constraints")

        # This adds an (optional) redundant maximum flow constraint. While the
        # flow constraint is not by itself powerful enough to enforce a correct
        # routing (and so must be used in conjunction with the routing
        # constraints above), it can allow the solver to prune parts of the
        # search space earlier than the routing constraints alone.

        # add a source and dest node, with 1 capacity from source to each net
        # start vertex, and 1 capacity from each net end vertex to dest
        g = flow_graph
        source = g.addNode()
        dest = g.addNode()
        for net in nets:
            Assert(g.addEdge(source, in_grid[g][net[0]], 1))  # directed edges!
            Assert(g.addEdge(out_grid[g][net[1]], dest, 1))  # directed edges!
        # create a maximum flow constraint
        m = g.maxFlowGreaterOrEqualTo(source, dest, len(nets))
        Assert(m)  # assert the maximum flow constraint

        # These settings control how the maximum flow constraints interact
        # heuristically with the routing constraints.
        if maxflow_enforcement_level == 3:
            # sometimes make decisions on the maxflow predicate.
            m.setDecisionPriority(1)
        elif maxflow_enforcement_level == 4:
            # always make decisions on the maxflow predicate.
            m.setDecisionPriority(2)
        else:
            # never make decisions on the maxflow predicate.
            m.setDecisionPriority(-1)

    print("Solving...")

    if Solve():

        def vid(x, y):
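            # row-major vertex id; e.g. with width = 5, vid(2, 1) -> '7'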
            return str(int(y) * width + int(x))

        def reduce_linestring(linestring):
            if len(linestring.coords) < 3:
                return linestring
            coords = [linestring.coords[0]]
            for i, end in enumerate(linestring.coords[2:]):
                start = linestring.coords[i]
                test = linestring.coords[i + 1]
                if not LineString([start, end]).contains(Point(test)):
                    coords.append(test)
            coords.append(linestring.coords[-1])
            return LineString(coords)
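        # e.g. reduce_linestring(LineString([(0, 0), (1, 0), (2, 0), (2, 1)]))
        # drops the collinear interior point (1, 0) and keeps the corner (2, 0)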

        print("Solved!")

        filename = filename.split('.')
        filename[-1] = 'out.pcrt'
        filename = '.'.join(filename)

        nets_coords = []
        for i, net in enumerate(nets):
            nets_coords.append(set())
            for n2 in net[1:]:
                r = reachset[i][n2]
                g = graphs[i]
                path = g.getPath(r)
                for n in path:
                    nets_coords[-1].add(fromGrid[n])

        nets_lines = []
        for i, net_coord in enumerate(nets_coords):
            #print('net_coord:', [vid(x, y) for x, y in net_coord])
            nets_lines.append([])

            lines = []
            for a, b in itertools.combinations(net_coord, 2):
                if Point(a).distance(Point(b)) == 1:
                    lines.append(LineString([a, b]))
            mls = linemerge(MultiLineString(lines))
            if not isinstance(mls, MultiLineString):
                line = reduce_linestring(mls).coords
                #print('line:', [vid(x, y) for x, y in line])
                nets_lines[-1].append(line)
            else:
                for line in mls.geoms:
                    line = reduce_linestring(line).coords
                    #print('line:', [vid(x, y) for x, y in line])
                    nets_lines[-1].append(line)

        nets = nets_lines
        with open(filename, 'w') as f:
            print('G', width, height, file=f)
            for net in nets:
                segs = [
                    ','.join([vid(x, y) for x, y in net_seg])
                    for net_seg in net
                ]
                print('N', *segs, file=f)

        print("s SATISFIABLE")
    else:
        print("s UNSATISFIABLE")
        sys.exit(1)
Ejemplo n.º 13
0
def test_tonumpy():
    point = Point(10, 10)
    line = LineString([(0, 0), (0, 1), (1, 1), (1, 0), (0, 0)])
    polygon = Polygon([(0, 0), (0, 1), (1, 1), (1, 0), (0, 0)])
    mp = MultiPoint([(0, 0), (1, 1), (1, 2), (2, 2)])
    ml = MultiLineString([[(0, 0), (1, 1)], [(1, 2), (2, 2)]])
    polygon2 = Polygon([(0, 0), (0, 1), (1, 1), (1, 0), (0, 0)], [
        LineString([(0.25, 0.25), (0.25, 0.75), (0.75, 0.75), (0.75, 0.25),
                    (0.25, 0.25)])
    ])
    mpol = MultiPolygon([polygon, polygon2])

    np.testing.assert_array_equal(point.np, np.array([[10, 10]]))
    np.testing.assert_array_equal(
        line.np,
        np.array([[0, 0, 0], [1, 0, 1], [2, 1, 1], [3, 1, 0], [0, 0, 0]]))
    np.testing.assert_almost_equal(
        line._np(isNorm=True),
        np.array([[0, 0., 0.0, 0.70710678, 0.70710678, 0., 0.],
                  [1, 0, 1., 0.70710678, -0.70710678, 0., 1.],
                  [2, 1., 1., -0.70710678, -0.70710678, 1., 1.],
                  [3, 1., 0., -0.70710678, 0.70710678, 1., 0.],
                  [0, 0, 0, 0.70710678, 0.70710678, 0., 0.]]))
    np.testing.assert_almost_equal(
        line._np(isNorm=True, onPoint=False),
        np.array([[0., 0., 0.5, 1., -0., 0., 0.],
                  [1., 0.5, 1., 0., -1., 0., 1.],
                  [2., 1., 0.5, -1., -0., 1., 1.],
                  [3., 0.5, 0., 0., 1., 1., 0.],
                  [0., 0., 0.5, 1., -0., 0., 0.]]))

    np.testing.assert_array_equal(
        polygon.np,
        np.array([[0, 0, 0, 0], [0, 1, 0, 1], [0, 2, 1, 1], [0, 3, 1, 0],
                  [0, 0, 0, 0]]))
    np.testing.assert_array_equal(mp.np,
                                  np.array([[0, 0], [1, 1], [1, 2], [2, 2]]))

    np.testing.assert_array_equal(
        ml.np,
        np.array([
            [0, 0, 0, 0],
            [0, 1, 1, 1],
            [1, 0, 1, 2],
            [1, 1, 2, 2],
        ]))

    np.testing.assert_array_equal(
        mpol.np,
        np.array([
            [0., 0., 0., 0., 0.],
            [0., 0., 1., 0., 1.],
            [0., 0., 2., 1., 1.],
            [0., 0., 3., 1., 0.],
            [0., 0., 0., 0., 0.],
            [1., 0., 0., 0., 0.],
            [1., 0., 1., 0., 1.],
            [1., 0., 2., 1., 1.],
            [1., 0., 3., 1., 0.],
            [1., 0., 0., 0., 0.],
            [1., 1., 0., 0.25, 0.25],
            [1., 1., 1., 0.25, 0.75],
            [1., 1., 2., 0.75, 0.75],
            [1., 1., 3., 0.75, 0.25],
            [1., 1., 0, 0.25, 0.25],
        ]))

    np.testing.assert_array_equal(
        line._np(isSegment=True),
        np.array([
            [0., 1., 0., 0.],
            [1., 1., 0., 1.],
            [2., 1., 1., 1.],
            [3., 1., 1., 0.],
            [0., 1., 0., 0.],
        ]))
    line1 = LineString([(0, 0), (1, 0), (3, 0), (6, 0), (9, 0)])
    np.testing.assert_array_equal(
        line1._np(isSegment=True),
        np.array([
            [0., 1., 0., 0.],
            [1., 1., 1., 0.],
            [2., 2., 3., 0.],
            [3., 3., 6., 0.],
            [4., 3., 9., 0.],
        ]))
Ejemplo n.º 14
0
 def lines(self):
     return MultiLineString([p.line for p in self.particles])
Ejemplo n.º 15
0
def MetricTalwegLength(axis, **kwargs):
    """
    Calculate intercepted talweg and reference axis length
    for every swath
    """

    talweg_feature = config.filename('ax_talweg', axis=axis, **kwargs)
    refaxis_feature = config.filename('ax_refaxis', axis=axis)
    measure_raster = config.tileset().filename('ax_axis_measure', axis=axis, **kwargs)
    swath_features = config.filename('ax_valley_swaths_polygons', axis=axis, **kwargs)

    # Sort talweg segments by first point M coordinate, descending

    talweg_fids = list()
    segments = list()

    with rio.open(measure_raster) as ds:
        with fiona.open(talweg_feature) as fs:
            for feature in fs:

                fid = feature['id']
                firstm = next(ds.sample([feature['geometry']['coordinates'][0][:2]], 1))
                talweg_fids.append((fid, firstm))

    with fiona.open(talweg_feature) as fs:
        for fid, _ in reversed(sorted(talweg_fids, key=itemgetter(1))):

            feature = fs.get(fid)
            segments.append(asShape(feature['geometry']))

    with fiona.open(refaxis_feature) as fs:

        # assert len(fs) == 1
        refaxis_segments = list()

        for feature in fs:

            refaxis_segments.append(asShape(feature['geometry']))

    talweg = MultiLineString(segments)
    refaxis = MultiLineString(refaxis_segments)

    with fiona.open(swath_features) as fs:

        size = len(fs)
        gids = np.zeros(size, dtype='uint32')
        measures = np.zeros(size, dtype='float32')
        lengths = np.zeros((size, 2), dtype='float32')

        with click.progressbar(fs) as iterator:
            for k, feature in enumerate(iterator):

                gid = feature['properties']['GID']
                polygon = asShape(feature['geometry'])

                gids[k] = gid
                measures[k] = feature['properties']['M']

                talweg_length = talweg.intersection(polygon).length
                lengths[k, 0] = talweg_length

                refaxis_length = refaxis.intersection(polygon).length
                lengths[k, 1] = refaxis_length

    metrics = xr.Dataset(
        {
            'swath': ('measure', gids),
            'talweg_length': ('measure', lengths[:, 0]),
            'refaxis_length': ('measure', lengths[:, 1]),
            'swath_length': 200.0
        },
        coords={
            'axis': axis,
            'measure': measures
        })

    # Metadata

    return metrics
Ejemplo n.º 16
0
    def __init__(self, target: Point, home: Point, ip: Point,
                 coalition: Coalition) -> None:
        self._target = target
        # Normal join placement is based on the path from home to the IP. If no path is
        # found it means that the target is on a direct path. In that case we instead
        # want to enforce that the join point is:
        #
        # * Not closer to the target than the IP.
        # * Not too close to the home airfield.
        # * Not threatened.
        # * A minimum distance from the IP.
        # * Not too sharp a turn at the ingress point.
        self.ip = ShapelyPoint(ip.x, ip.y)
        self.threat_zone = coalition.opponent.threat_zone.all
        self.home = ShapelyPoint(home.x, home.y)

        self.ip_bubble = self.ip.buffer(
            coalition.doctrine.join_distance.meters)

        ip_distance = ip.distance_to_point(target)
        self.target_bubble = ShapelyPoint(target.x,
                                          target.y).buffer(ip_distance)

        # The minimum distance between the home location and the IP.
        min_distance_from_home = nautical_miles(5)

        self.home_bubble = self.home.buffer(min_distance_from_home.meters)

        excluded_zones = shapely.ops.unary_union(
            [self.ip_bubble, self.target_bubble, self.threat_zone])

        if not isinstance(excluded_zones, MultiPolygon):
            excluded_zones = MultiPolygon([excluded_zones])
        self.excluded_zones = excluded_zones

        ip_heading = target.heading_between_point(ip)

        # Arbitrarily large since this is later constrained by the map boundary, and
        # we'll be picking a location close to the IP anyway. Just used to avoid real
        # distance calculations to project to the map edge.
        large_distance = nautical_miles(400).meters
        turn_limit = 40
        ip_limit_ccw = ip.point_from_heading(ip_heading - turn_limit,
                                             large_distance)
        ip_limit_cw = ip.point_from_heading(ip_heading + turn_limit,
                                            large_distance)

        ip_direction_limit_wedge = Polygon([
            (ip.x, ip.y),
            (ip_limit_ccw.x, ip_limit_ccw.y),
            (ip_limit_cw.x, ip_limit_cw.y),
        ])

        permissible_zones = ip_direction_limit_wedge.difference(
            self.excluded_zones).difference(self.home_bubble)
        if permissible_zones.is_empty:
            permissible_zones = MultiPolygon([])
        if not isinstance(permissible_zones, MultiPolygon):
            permissible_zones = MultiPolygon([permissible_zones])
        self.permissible_zones = permissible_zones

        preferred_lines = ip_direction_limit_wedge.intersection(
            self.excluded_zones.boundary).difference(self.home_bubble)

        if preferred_lines.is_empty:
            preferred_lines = MultiLineString([])
        if not isinstance(preferred_lines, MultiLineString):
            preferred_lines = MultiLineString([preferred_lines])
        self.preferred_lines = preferred_lines
Ejemplo n.º 17
0
def _overlay_old(df1, df2, how, use_sindex=True, **kwargs):
    """Perform spatial overlay between two polygons.

    Currently only supports data GeoDataFrames with polygons.
    Implements several methods that are all effectively subsets of
    the union.

    Parameters
    ----------
    df1 : GeoDataFrame with MultiPolygon or Polygon geometry column
    df2 : GeoDataFrame with MultiPolygon or Polygon geometry column
    how : string
        Method of spatial overlay: 'intersection', 'union',
        'identity', 'symmetric_difference' or 'difference'.
    use_sindex : boolean, default True
        Use the spatial index to speed up operation if available.

    Returns
    -------
    df : GeoDataFrame
        GeoDataFrame with new set of polygons and attributes
        resulting from the overlay

    """
    allowed_hows = [
        'intersection',
        'union',
        'identity',
        'symmetric_difference',
        'difference',  # aka erase
    ]

    if how not in allowed_hows:
        raise ValueError("`how` was \"%s\" but is expected to be in %s" % \
            (how, allowed_hows))

    if isinstance(df1, GeoSeries) or isinstance(df2, GeoSeries):
        raise NotImplementedError(
            "overlay currently only implemented for GeoDataFrames")

    # Collect the interior and exterior rings
    rings1 = _extract_rings(df1)
    rings2 = _extract_rings(df2)
    mls1 = MultiLineString(rings1)
    mls2 = MultiLineString(rings2)

    # Union and polygonize
    mm = unary_union([mls1, mls2])
    newpolys = polygonize(mm)

    # determine spatial relationship
    collection = []
    for fid, newpoly in enumerate(newpolys):
        cent = newpoly.representative_point()

        # Test intersection with original polys
        # FIXME there should be a higher-level abstraction to search by bounds
        # and fall back in the case of no index?
        if use_sindex and df1.sindex is not None:
            candidates1 = [
                x.object
                for x in df1.sindex.intersection(newpoly.bounds, objects=True)
            ]
        else:
            candidates1 = [i for i, x in df1.iterrows()]

        if use_sindex and df2.sindex is not None:
            candidates2 = [
                x.object
                for x in df2.sindex.intersection(newpoly.bounds, objects=True)
            ]
        else:
            candidates2 = [i for i, x in df2.iterrows()]

        df1_hit = False
        df2_hit = False
        prop1 = None
        prop2 = None
        for cand_id in candidates1:
            cand = df1.loc[cand_id]
            if cent.intersects(cand[df1.geometry.name]):
                df1_hit = True
                prop1 = cand
                break  # Take the first hit
        for cand_id in candidates2:
            cand = df2.loc[cand_id]
            if cent.intersects(cand[df2.geometry.name]):
                df2_hit = True
                prop2 = cand
                break  # Take the first hit

        # determine spatial relationship based on type of overlay
        hit = False
        if how == "intersection" and (df1_hit and df2_hit):
            hit = True
        elif how == "union" and (df1_hit or df2_hit):
            hit = True
        elif how == "identity" and df1_hit:
            hit = True
        elif how == "symmetric_difference" and not (df1_hit and df2_hit):
            hit = True
        elif how == "difference" and (df1_hit and not df2_hit):
            hit = True

        if not hit:
            continue

        # gather properties
        if prop1 is None:
            prop1 = pd.Series(dict.fromkeys(df1.columns, None))
        if prop2 is None:
            prop2 = pd.Series(dict.fromkeys(df2.columns, None))

        # Concat but don't retain the original geometries
        out_series = pd.concat([
            prop1.drop(df1._geometry_column_name),
            prop2.drop(df2._geometry_column_name)
        ])

        out_series.index = _uniquify(out_series.index)

        # Create a geoseries and add it to the collection
        out_series['geometry'] = newpoly
        collection.append(out_series)

    # Return geodataframe with new indices
    return GeoDataFrame(collection, index=range(len(collection)))
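A minimal usage sketch for the overlay above (hedged: `df_a`, `df_b` and the
expected output are illustrative only, and running it assumes the module's
private helpers `_extract_rings` and `_uniquify` are available alongside
`_overlay_old`):

import geopandas as gpd
from shapely.geometry import Polygon

# two overlapping 2x2 squares
df_a = gpd.GeoDataFrame({'name_a': ['a']},
                        geometry=[Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])])
df_b = gpd.GeoDataFrame({'name_b': ['b']},
                        geometry=[Polygon([(1, 1), (3, 1), (3, 3), (1, 3)])])

# the intersection overlay should yield the single unit square (1, 1)-(2, 2)
result = _overlay_old(df_a, df_b, how='intersection', use_sindex=False)
print(result.geometry.iloc[0].bounds)  # (1.0, 1.0, 2.0, 2.0)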
Ejemplo n.º 18
0
def merge(child_node_a, child_node_b):
    """
    Merges two nodes together. Assume subgraphs are merged.
    Note that the nature of the graph generation ensures that there are no single-child sub-graphs
    (all parents have 2 children).
    Args:
        child_node_a: first child node to merge
        child_node_b: second child node to merge, sibling to child_node_a
    """
    global tract_coords
    # look at the heads of the two graphs and form the candidate pairs that
    # could be merged (let a be a head from child a, b a head from child b)
    merge_pairs = [(a, b) for a in child_node_a.heads for b in child_node_b.heads]

    # evaluate pairs to see if they are "visible" to each other - i.e. if they
    # don't cross over either node's existing lines
    final_merge_pairs = []
    for p in merge_pairs:
        p0_coords = [tract_coords[tract_coords.tract_ID == p[0]].X.values[0],
                     tract_coords[tract_coords.tract_ID == p[0]].Y.values[0]]
        p1_coords = [tract_coords[tract_coords.tract_ID == p[1]].X.values[0],
                     tract_coords[tract_coords.tract_ID == p[1]].Y.values[0]]
        newLine = LineString([p0_coords, p1_coords])
        # combine a.graph, b.graph and newLine: if the combined geometry is
        # simple (no self-intersections), the new connection does not cross
        # any existing line, so the pair stays a candidate
        ring = MultiLineString(child_node_a.graph + child_node_b.graph + [newLine])
        if ring.is_simple:
            final_merge_pairs.append(p)

    merge_pairs = [p for p in final_merge_pairs]

    if (len(merge_pairs) == 0):  # no "visible" connections, have to go back and redo merge children for both nodes
        # first, need to remove old lines from children's left & right
        # raise RuntimeError("Trapped case reached!")
        print("Backtracking!")
        if len(child_node_a.tracts) > 3:
            child_node_a.graph = []
            child_node_a.heads = []
            child_node_a.left.connecting_line = None
            child_node_a.right.connecting_line = None
            # TODO: re-merge, but blacklist current pair
            merge(child_node_a.left, child_node_a.right)
        else:
            print('Child A is a leaf!')
        if len(child_node_b.tracts) > 3:
            child_node_b.graph = []
            child_node_b.heads = []
            child_node_b.left.connecting_line = None
            child_node_b.right.connecting_line = None
            # TODO: re-merge, but blacklist current pair
            merge(child_node_b.left, child_node_b.right)
        else:
            print('Child B is a leaf!')

        # re-do the visibility check
        merge_pairs = [(a, b) for a in child_node_a.heads for b in child_node_b.heads]

        final_merge_pairs = []
        for p in merge_pairs:
            p0_coords = [tract_coords[tract_coords.tract_ID == p[0]].X.values[0],
                         tract_coords[tract_coords.tract_ID == p[0]].Y.values[0]]
            p1_coords = [tract_coords[tract_coords.tract_ID == p[1]].X.values[0],
                         tract_coords[tract_coords.tract_ID == p[1]].Y.values[0]]
            newLine = LineString([p0_coords, p1_coords])
            # combine a.graph, b.graph and newLine: if the combined geometry
            # is simple (no self-intersections), the new connection does not
            # cross any existing line, so the pair stays a candidate
            ring = MultiLineString(child_node_a.graph + child_node_b.graph + [newLine])
            if ring.is_simple:
                final_merge_pairs.append(p)

        merge_pairs = [p for p in final_merge_pairs]
    else:
        # copy lines and heads into parent, plus new line (randomly chosen from merge_pairs)
        if len(merge_pairs) > 1:
            choice = merge_pairs[np.random.choice(range(len(merge_pairs)))]
        else:
            choice = merge_pairs[0]
        # print("For", child_node_a.tracts, "and", child_node_b.tracts, "choosing", choice)
        source_coords = [tract_coords[tract_coords.tract_ID == choice[0]].X.values[0],
                         tract_coords[tract_coords.tract_ID == choice[0]].Y.values[0]]
        dest_coords = [tract_coords[tract_coords.tract_ID == choice[1]].X.values[0],
                       tract_coords[tract_coords.tract_ID == choice[1]].Y.values[0]]
        connection = LineString([source_coords, dest_coords])
        child_node_a.connecting_line = connection
        child_node_b.connecting_line = connection
        child_node_a.parent.graph = child_node_a.graph + child_node_b.graph
        child_node_a.parent.graph.append(connection)
        # heads for parent are the 2 nodes that weren't connected
        if len(child_node_a.heads) == 1:
            child_node_a.parent.heads.append(child_node_a.heads[0])
        else:
            child_node_a.parent.heads.append(child_node_a.heads[1 - child_node_a.heads.index(choice[0])])
        if len(child_node_b.heads) == 1:
            child_node_a.parent.heads.append(child_node_b.heads[0])
        else:
            child_node_a.parent.heads.append(child_node_b.heads[1 - child_node_b.heads.index(choice[1])])
    # return parent as TreeNode object
    # print("Merged! Returning", child_node_a.parent.tracts, "/", child_node_b.parent.tracts)
    child_node_a.parent.graph_node()
    return child_node_a.parent  # or child_node_b.parent, it really doesn't matter
Ejemplo n.º 19
0
def test_difference():
    """Testing own implementation of geometry difference operator"""

    # Simple cases
    u = LineString([(0, 0), (2, 0)])

    v = LineString([(1, 0), (3, 0)])
    diff = difference(u, v)
    assert diff == LineString([(0, 0), (1, 0)])

    v = LineString([(3, 0), (1, 0)])
    diff = difference(u, v)
    assert diff == LineString([(0, 0), (1, 0)])

    v = LineString([(-1, 0), (1, 0)])
    diff = difference(u, v)
    assert diff == LineString([(1, 0), (2, 0)])

    v = LineString([(1, 0), (-1, 0)])
    diff = difference(u, v)
    assert diff == LineString([(1, 0), (2, 0)])

    v = LineString([(0.5, 0), (1.5, 0)])
    diff = difference(u, v)
    assert diff == MultiLineString([((0, 0), (0.5, 0)), ((1.5, 0), (2, 0))])

    v = LineString([(1.5, 0), (0.5, 0)])
    diff = difference(u, v)
    assert diff == MultiLineString([((0, 0), (0.5, 0)), ((1.5, 0), (2, 0))])

    v = LineString([(1, 0), (1, 1)])
    diff = difference(u, v)
    assert diff == u

    v = LineString([(1, 1), (1, 0)])
    diff = difference(u, v)
    assert diff == u

    v = LineString([(1, 1), (1, 2)])
    diff = difference(u, v)
    assert diff == u

    v = LineString([(0, 0), (1, 0)])
    diff = difference(u, v)
    assert diff == LineString([(1, 0), (2, 0)])

    # Case with potentially float error
    u = LineString([(0, 0), (3, 2)])
    v = LineString([(0, 0), u.interpolate(0.5, normalized=True)])
    diff = difference(u, v)
    assert diff.length == u.length / 2.

    # Case where the difference should return an empty geometry
    diff = difference(u, u)
    assert isinstance(diff, LineString)
    assert diff.is_empty

    # Special case that caused crash
    u = LineString([(1, 0), (0, 0)])
    v = LineString([(0, 0), (2, 0)])
    diff = difference(u, v)
    assert diff.is_empty

    # Special case that caused crash
    u = LineString([(1, 0), (0, 0)])
    v = LineString([(-2, 0), (1, 0)])
    diff = difference(u, v)
    assert diff.is_empty
def contour_extract(ds_array,
                    z_values,
                    ds_crs,
                    ds_affine,
                    output_shp,
                    min_vertices=2,
                    attribute_data=None,
                    attribute_dtypes=None,
                    dim='time',
                    verbose=True):
    """
    Uses `skimage.measure.find_contours` to extract multiple z-value contour lines from a two-dimensional array
    (e.g. multiple elevations from a single DEM), or one z-value for each array along a specified dimension of a 
    multi-dimensional array (e.g. to map waterlines across time by extracting a 0 NDVI contour from each individual 
    timestep in an xarray timeseries).    
    
    Contours are exported to file as a shapefile and returned as a geopandas geodataframe with one row per
    z-value or one row per array along a specified dimension. The `attribute_data` and `attribute_dtypes` parameters 
    can be used to pass custom attributes to the output contour features.

    Last modified: November 2018
    Author: Robbi Bishop-Taylor
    
    Parameters
    ----------  
    ds_array : xarray DataArray
        A two-dimensional or multi-dimensional array from which contours are extracted. If a two-dimensional array
        is provided, the analysis will run in 'single array, multiple z-values' mode which allows you to specify 
        multiple `z_values` to be extracted. If a multi-dimensional array is provided, the analysis will run in 
        'single z-value, multiple arrays' mode allowing you to extract contours for each array along the dimension
        specified by the `dim` parameter.  
    z_values : int, float or list of ints, floats
        An individual z-value or list of multiple z-values to extract from the array. If operating in 'single 
        z-value, multiple arrays' mode specify only a single z-value.
    ds_crs : string or CRS object
        Either a EPSG string giving the coordinate system of the array (e.g. 'EPSG:3577'), or a crs
        object (e.g. from an xarray dataset: `xarray_ds.geobox.crs`).
    ds_affine : affine.Affine object or GDAL geotransform
        Either an affine object from a rasterio or xarray object (e.g. `xarray_ds.geobox.affine`), or a gdal-derived
        geotransform object (e.g. `gdal_ds.GetGeoTransform()`) which will be converted to an affine.
    output_shp : string
        The path and filename for the output shapefile.
    min_vertices : int, optional
        The minimum number of vertices required for a contour to be extracted. The default (and minimum) value is 2,
        which is the smallest number required to produce a contour line (i.e. a start and end point). Higher values
        remove smaller contours, potentially removing noise from the output dataset.
    attribute_data : dict of lists, optional
        An optional dictionary of lists used to define custom attributes/fields to add to the shapefile. Dict keys 
        give the name of the shapefile field, while dict values must be lists of the same length as `z_values`
        (for 'single array, multiple z-values' mode) or the number of arrays along the dimension specified by the `dim`
        parameter (for 'single z-value, multiple arrays' mode). For example, if `z_values=[0, 10, 20]`, then 
        `attribute_data={'type': [1, 2, 3]}` can be used to create a shapefile field called 'type' with a value for
        each contour in the shapefile. The default is None, which produces a default shapefile field called 'z_value'
        with values taken directly from the `z_values` parameter and formatted as a 'float:9.2' ('single array, 
        multiple z-values' mode), or a field named after `dim` numbered from 0 to the total number of arrays along 
        the `dim` dimension ('single z-value, multiple arrays' mode).
    attribute_dtypes : dict, optional
        An optional dictionary giving the output dtype for each custom shapefile attribute field specified by
        `attribute_data`. For example, `attribute_dtypes={'type': 'int'}` can be used to set the 'type' field to an
        integer dtype. The dictionary should have the same keys/field names as declared in `attribute_data`.
        Valid values include 'int', 'str', 'datetime', and 'float:X.Y', where X is the minimum number of characters
        before the decimal place, and Y is the number of characters after the decimal place.
    dim : string, optional
        The name of the dimension along which to extract contours when operating in 'single z-value, multiple arrays'
        mode. The default is 'time', which extracts contours for each array along the time dimension.
    verbose: bool, optional
        Whether to print the result of each contour extraction to the console. The default is True which prints all
        results; set to False for a cleaner output, particularly when extracting large numbers of contours.

    Returns
    -------
    output_gdf : geopandas geodataframe
        A geopandas geodataframe object with one feature per z-value ('single array, multiple z-values' mode), or one
        row per array along the dimension specified by the `dim` parameter ('single z-value, multiple arrays' mode). 
        If `attribute_data` and `attribute_dtypes` are provided, these values will be included in the shapefile's
        attribute table.

    Example
    -------   
    >>> # Import modules
    >>> import sys
    >>> import datacube

    >>> # Import external dea-notebooks functions using relative link to Scripts directory
    >>> sys.path.append('../10_Scripts')
    >>> import SpatialTools

    >>> # Set up datacube instance
    >>> dc = datacube.Datacube(app='Contour extraction')

    ########################################
    # Single array, multiple z-values mode #
    ########################################
    
    >>> # Define an elevation query
    >>> elevation_query = {'lat': (-35.25, -35.35),
    ...                    'lon': (149.05, 149.17),
    ...                    'output_crs': 'EPSG:3577',
    ...                    'resolution': (-25, 25)}

    >>> # Import sample elevation data
    >>> elevation_data = dc.load(product='srtm_dem1sv1_0', **elevation_query)

    >>> # Extract contours
    >>> contour_gdf = SpatialTools.contour_extract(z_values=[600, 700, 800],
    ...                                            ds_array=elevation_data.dem_h,
    ...                                            ds_crs=elevation_data.geobox.crs,
    ...                                            ds_affine=elevation_data.geobox.affine,
    ...                                            output_shp='extracted_contours.shp')
    Dimension 'time' has length of 1; removing from array
    Operating in single array, multiple z-values mode
        Extracting contour 600
        Extracting contour 700
        Extracting contour 800
    Exporting contour shapefile to extracted_contours.shp
    
    ########################################
    # Single z-value, multiple arrays mode #
    ########################################
    
    >>> # Define a Landsat query
    >>> landsat_query = {'lat': (-35.25, -35.35),
    ...                  'lon': (149.05, 149.17),
    ...                  'time': ('2016-02-15', '2016-03-01'),
    ...                  'output_crs': 'EPSG:3577',
    ...                  'resolution': (-25, 25)}

    >>> # Import sample Landsat data
    >>> landsat_data = dc.load(product='ls8_nbart_albers', 
    ...                        group_by='solar_day',
    ...                        **landsat_query)
    
    >>> # Test that there are multiple arrays along the 'time' dimension
    >>> print(len(landsat_data.time))
    2

    >>> # Set up custom attributes to be added as shapefile fields
    >>> attribute_data = {'value': ['first_contour', 'second_contour']}
    >>> attribute_dtypes = {'value': 'str'}

    >>> # Extract contours
    >>> contour_gdf = SpatialTools.contour_extract(z_values=3000,
    ...                                            ds_array=landsat_data.red,
    ...                                            ds_crs=landsat_data.geobox.crs,
    ...                                            ds_affine=landsat_data.geobox.affine,
    ...                                            output_shp='extracted_contours.shp',
    ...                                            attribute_data=attribute_data,
    ...                                            attribute_dtypes=attribute_dtypes,
    ...                                            dim='time')
    Operating in single z-value, multiple arrays mode
        Extracting contour 0
        Extracting contour 1
    Exporting contour shapefile to extracted_contours.shp

    """

    # Obtain affine object from either rasterio/xarray affine or a gdal geotransform:
    if not isinstance(ds_affine, affine.Affine):
        ds_affine = affine.Affine.from_gdal(*ds_affine)
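        # e.g. a GDAL geotransform (ulx, xres, xskew, uly, yskew, yres) such
        # as the hypothetical (1545000.0, 25.0, 0.0, -3955000.0, 0.0, -25.0)
        # becomes Affine(25.0, 0.0, 1545000.0, 0.0, -25.0, -3955000.0)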

    # If the z_values supplied are not already in a list, convert to a list before proceeding:
    z_values = z_values if isinstance(z_values, list) else [z_values]
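    # e.g. z_values=600 becomes [600], while z_values=[600, 700, 800] is unchanged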

    # If array has only one layer along the `dim` dimension (e.g. time), remove the dim:
    try:
        ds_array = ds_array.squeeze(dim=dim)
        print(f"Dimension '{dim}' has length of 1; removing from array")

    except (KeyError, ValueError):
        # `dim` is either missing from the array or has a length greater than one
        pass

    ########################################
    # Single array, multiple z-values mode #
    ########################################

    # Output dict to hold contours for each z-value or input array
    contours_dict = collections.OrderedDict()

    # If array has only two dimensions, run in single array, multiple z-values mode:
    if len(ds_array.shape) == 2:

        print('Operating in single array, multiple z-values mode')

        # If no custom attributes given, default to including a single z-value field based on `z_values`
        if not attribute_data:

            # The default field uses two decimal places
            attribute_data = {'z_value': z_values}
            attribute_dtypes = {'z_value': 'float:9.2'}

        # If custom attributes are provided, test that they are equal in length to the number of `z_values`:
        else:

            for key, values in attribute_data.items():

                if len(values) != len(z_values):

                    raise Exception(
                        f"Supplied attribute '{key}' has length of {len(values)} while z_values has "
                        f"length of {len(z_values)}; please supply the same number of attribute values "
                        "as z_values")

        for z_value in z_values:

            # Extract contours and convert output array cell coords into arrays of coordinate reference system coords.
            # We need to add (0.5 x the pixel size) to the x and y values to shift coordinates from the top-left
            # corner of each pixel to its centre point
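            # e.g. with hypothetical 25 m pixels (ps_x=25.0, ps_y=-25.0), a vertex
            # at a pixel's top-left corner (x, y) shifts to (x + 12.5, y - 12.5)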
            if verbose: print(f'    Extracting contour {z_value}')
            ps_x = ds_affine[0]  # Compute pixel x size
            ps_y = ds_affine[4]  # Compute pixel y size
            contours_geo = [
                np.column_stack(ds_affine * (i[:, 1], i[:, 0])) +
                np.array([0.5 * ps_x, 0.5 * ps_y])
                for i in find_contours(ds_array, z_value)
            ]

            # For each array of coordinates, drop any xy points that have NA
            contours_nona = [i[~np.isnan(i).any(axis=1)] for i in contours_geo]

            # Drop contours with fewer than min_vertices points
            contours_withdata = [
                i for i in contours_nona if len(i) >= min_vertices
            ]

            # If there is data for the contour, add to dict:
            if len(contours_withdata) > 0:
                contours_dict[z_value] = contours_withdata

            else:
                if verbose:
                    print(f'    No data for contour {z_value}; skipping')
                contours_dict[z_value] = None

    ########################################
    # Single z-value, multiple arrays mode #
    ########################################

    # For inputs with more than two dimensions, run in single z-value, multiple arrays mode:
    else:

        # Test if only a single z-value is given when operating in single z-value, multiple arrays mode
        print('Operating in single z-value, multiple arrays mode')
        if len(z_values) > 1:
            raise Exception('Please provide a single z-value when operating '
                            'in single z-value, multiple arrays mode')

        # If no custom attributes given, default to including one field based on the `dim` dimension:
        if not attribute_data:

            # Default field is numbered from 0 to the number of arrays along the `dim` dimension:
            attribute_data = {dim: range(0, len(ds_array[dim]))}
            attribute_dtypes = {dim: 'int'}

        # If custom attributes are provided, test that they are equal in length to the number of arrays along `dim`:
        else:

            for key, values in attribute_data.items():

                if len(values) != len(ds_array[dim]):

                    raise Exception(
                        f"Supplied attribute '{key}' has length of {len(values)} while there are "
                        f"{len(ds_array[dim])} arrays along the '{dim}' dimension. Please supply "
                        f"the same number of attribute values as arrays along the '{dim}' dimension"
                    )

        for z_value, _ in enumerate(ds_array[dim]):

            # Extract contours and convert output array cell coords into arrays of coordinate reference system coords.
            # We need to add (0.5 x the pixel size) to the x and y values to shift coordinates from the top-left
            # corner of each pixel to its centre point
            if verbose: print(f'    Extracting contour {z_value}')
            ps_x = ds_affine[0]  # Compute pixel x size
            ps_y = ds_affine[4]  # Compute pixel y size
            contours_geo = [
                np.column_stack(ds_affine * (i[:, 1], i[:, 0])) +
                np.array([0.5 * ps_x, 0.5 * ps_y]) for i in find_contours(
                    ds_array.isel({dim: z_value}), z_values[0])
            ]

            # For each array of coordinates, drop any xy points that have NA
            contours_nona = [i[~np.isnan(i).any(axis=1)] for i in contours_geo]

            # Drop contours with fewer than min_vertices points
            contours_withdata = [
                i for i in contours_nona if len(i) >= min_vertices
            ]

            # If there is data for the contour, add to dict:
            if len(contours_withdata) > 0:
                contours_dict[z_value] = contours_withdata

            else:
                if verbose:
                    print(f'    No data for contour {z_value}; skipping')
                contours_dict[z_value] = None

    #######################
    # Export to shapefile #
    #######################

    # If a shapefile path is given, generate shapefile
    if output_shp:

        print(f'Exporting contour shapefile to {output_shp}')

        # Set up output multiline shapefile properties
        schema = {
            'geometry': 'MultiLineString',
            'properties': attribute_dtypes
        }

        # Create output shapefile for writing
        with fiona.open(output_shp,
                        'w',
                        crs={
                            'init': str(ds_crs),
                            'no_defs': True
                        },
                        driver='ESRI Shapefile',
                        schema=schema) as output:

            # Write each feature to the shapefile one by one
            for i, (z_value, contours) in enumerate(contours_dict.items()):

                if contours:

                    # Create a MultiLineString object from all contour coordinates
                    contour_multilinestring = MultiLineString(contours)

                    # Get attribute values for writing
                    attribute_vals = {
                        field_name: field_vals[i]
                        for field_name, field_vals in attribute_data.items()
                    }

                    # Write the feature to the shapefile with its attribute values
                    output.write({
                        'properties': attribute_vals,
                        'geometry': mapping(contour_multilinestring)
                    })

    # Read the exported shapefile back in and return as a geodataframe
    output_gdf = gpd.read_file(output_shp)
    return output_gdf
Example #21
    print(parsed)


# test("POINT", b"\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3e\x40\x00\x00\x00\x00\x00\x00\x24\x40")
# test("MULTIPOINT", b"\x01\x04\x00\x00\x00\x04\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x24\x40\x00\x00\x00\x00\x00\x00\x44\x40\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x44\x40\x00\x00\x00\x00\x00\x00\x3e\x40\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x34\x40\x00\x00\x00\x00\x00\x00\x34\x40\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3e\x40\x00\x00\x00\x00\x00\x00\x24\x40")
# test("MULTILINESTRING", b"\x01\x05\x00\x00\x00\x02\x00\x00\x00\x01\x02\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x24\x40\x00\x00\x00\x00\x00\x00\x24\x40\x00\x00\x00\x00\x00\x00\x34\x40\x00\x00\x00\x00\x00\x00\x34\x40\x00\x00\x00\x00\x00\x00\x24\x40\x00\x00\x00\x00\x00\x00\x44\x40\x01\x02\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x44\x40\x00\x00\x00\x00\x00\x00\x44\x40\x00\x00\x00\x00\x00\x00\x3e\x40\x00\x00\x00\x00\x00\x00\x3e\x40\x00\x00\x00\x00\x00\x00\x44\x40\x00\x00\x00\x00\x00\x00\x34\x40\x00\x00\x00\x00\x00\x00\x3e\x40\x00\x00\x00\x00\x00\x00\x24\x40")

# tests using shapely Geometry.wkb

reference("POINT", Point(1, 2))
reference("MULTIPOINT", MultiPoint([Point(1, 2), Point(1, 2)]))
reference("LINESTRING", LineString([Point(1, 2), Point(1, 2)]))
reference(
    "MULTILINESTRING",
    MultiLineString([
        LineString([Point(1, 2), Point(1, 2)]),
        LineString([Point(1, 2), Point(1, 2)])
    ]))
reference(
    "POLYGON",
    Polygon(
        LinearRing(
            [Point(0, 0),
             Point(0, 1),
             Point(1, 1),
             Point(1, 0),
             Point(0, 0)]), [
                 LinearRing([
                     Point(0.1, 0.1),
                     Point(0.1, 0.9),
                     Point(0.9, 0.9),
                     Point(0.9, 0.1),
Example #22
 def parse(self, shape):
     """Parses coordinates or shapely object"""
     if shape:
         if hasattr(shape, 'name'):
             self.name = shape.name
         # Check for class with a geometry attribute
         try:
             shape = shape.geometry
         except AttributeError:
             pass
         if isinstance(shape, NaiveGeoMetry):
             # Transform the shape to the given CRS if necessary
             if epsg_id(self.crs) != epsg_id(shape.crs):
                 shape = shape.transform(self.crs)
             # Shape is an instance of this class
             self.verbatim = shape.verbatim
             self.verbatim_shape = shape.verbatim_shape
             self.verbatim_crs = shape.verbatim_crs
             self.geom_type = shape.geom_type
             self.shape = shape.parsed_shape
             # Attributes that only exist in a subclass will not be carried over
             for attr in self.cache:
                 try:
                     setattr(self, attr, getattr(shape, attr))
                 except AttributeError:
                     pass
             if self._radius_km is None:
                 # Setting the private attribute sidesteps the radius
                 # setter, which produces a different shape for points
                 self.radius_km = shape.radius_km
             self.subshapes = shape.subshapes
             return None
         if isinstance(shape, BaseGeometry):
             # Shape is a shapely geometry object
             return shape
         if isinstance(shape, bytes):
             return wkb.loads(shape)
         if isinstance(shape, str):
             return wkt.loads(shape)
         if isinstance(shape, dict):
             # Shape is a GeoNames-style bounding box
             lats = [shape['south'], shape['north']]
             lngs = [shape['west'], shape['east']]
             return Polygon(bounding_box(lats[0], lngs[0], lats[1],
                                         lngs[1]))
         if isinstance(shape, (list, tuple)):
             shape = shape[:]
             # Convert numpy arrays to lists
             try:
                 shape = [c.tolist() for c in shape]
             except AttributeError:
                 pass
             # Extract underlying shapely shapes from a list of geometries
             if isinstance(shape[0], NaiveGeoMetry):
                 geoms = []
                 for geom in shape:
                     shape = geom.verbatim_shape
                     geom = self.__class__(shape, crs=geom.verbatim_crs)
                     if geom.crs != self.crs:
                         geom = geom.transform(self.crs)
                     geoms.append(geom)
                 shape = [g.shape for g in geoms]
             # Lists of shapely objects
             if isinstance(shape[0], BaseGeometry):
                 if len(shape) == 1:
                     return shape[0]
                 # Shape is a list mixing multiple shapely objects
                 if len({s.geom_type for s in shape}) > 1:
                     return GeometryCollection(shape)
                 # Shape is a list of Points
                 shape_class = LineString if len(shape) == 2 else Polygon
                 try:
                     return shape_class([(p.x, p.y) for p in shape])
                 except AttributeError:
                     pass
                 # Shape is a list of Polygons
                 if isinstance(shape[0], Polygon):
                     try:
                         return MultiPolygon(shape)
                     except ValueError:
                         pass
                 # Shape is a list of LineStrings
                 if isinstance(shape[0], LineString):
                     try:
                         return MultiLineString(shape)
                     except ValueError:
                         pass
             # Shape is a list of coordinates
             list_of_lists = isinstance(shape[0], (list, tuple))
             try:
                 list_of_pairs = all([len(c) == 2 for c in shape[:10]])
             except TypeError:
                 list_of_pairs = False
             if list_of_lists and list_of_pairs:
                 # Shape is [(lat, lng)] or [(lat1, lng1),...]
                 lat_lngs = list(shape)
             elif list_of_lists:
                 # Shape is [lats, lngs]
                 lat_lngs = list(zip(*shape))
             elif len(shape) == 2:
                 # Shape is (lat, lng)
                 lat_lngs = [shape]
             else:
                 msg = 'Parse failed: {} (unknown format)'.format(shape)
                 logger.error(msg)
                 raise ValueError(msg)
             # Ensure that coordinates are floats
             lats = []
             lngs = []
             for lat, lng in lat_lngs:
                 lats.append(self.parse_coordinate(lat, 'latitude'))
                 lngs.append(self.parse_coordinate(lng, 'longitude'))
             # Convert coordinates to shapely geometry
             xy = list(zip(lngs, lats))
             if len(xy) == 1:
                 return Point(xy[0])
             if len(xy) == 2:
                 return LineString(xy)
             return Polygon(xy)
     msg = 'Parse failed: {} (empty)'.format(shape)
     raise ValueError(msg)
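
# A minimal usage sketch (hypothetical: assumes a concrete geometry class
# whose constructor routes through parse(), as the recursive
# self.__class__(shape, crs=...) calls above suggest):
# geom = NaiveGeoMetry([(45.0, -93.0), (45.1, -93.1)], crs='EPSG:4326')
# geom.shape  # -> LineString in (lng, lat) / x-y order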
Example #23
def saf_retreat_date(interp_point, write_river_section=None):
    """
    Returns the date st anthony falls passed interp_point
    """
    utm_15n = {'init': 'epsg:26915'}
    wgs84 = {'init': 'epsg:4326'}

    #### Read in Mississippi River
    mr_path = r'C:\Users\Jeff Disbrow\Documents\UMN-Drive\Fall19\ESCI8701\project\miss_river.shp'
    mr = shape(next(iter(fiona.open(mr_path)))['geometry'])
    mr = linemerge(mr)
    mr = MultiLineString([line for line in mr])
    mr = linemerge(mr)

    #### Split MR at SAF, Hidden Falls, St. Paul
    ## Point locations
    saf = Point(-93.2476297, 44.9791802)
    hf = Point(-93.1958309, 44.9063822)
    stp = Point(-93.07419, 44.94705)
    points = gpd.GeoDataFrame({'loc': ['saf', 'hf', 'stp']},
                              geometry=[saf, hf, stp],
                              crs=wgs84)
    points = points.to_crs(utm_15n)

    mr_df = gpd.GeoDataFrame(geometry=[l for l in mr])

    # Snap points to MR for splitting, then get back lines as shapely geometries
    mr_df = gpd.GeoDataFrame(geometry=[mr])
    pts_snapped = snap2closest_line(points, mr_df, tolerance=10000)
    saf = pts_snapped[pts_snapped['loc'] == 'saf'].geometry.values[0]
    hf = pts_snapped[pts_snapped['loc'] == 'hf'].geometry.values[0]
    stp = pts_snapped[pts_snapped['loc'] == 'stp'].geometry.values[0]

    ## Split
    # Select only the relevant section of MR (one LineString from MultiLineString)
    mr = [l for l in mr if l.distance(hf) < 100][0]
    # Create splitters
    hf_splitter = create_splitter(hf.x, hf.y, frac=0.0005)
    saf_splitter = create_splitter(saf.x, saf.y, frac=0.0005)
    stp_splitter = create_splitter(stp.x, stp.y, frac=0.0005)
    # Split to get just sections: stp->hf->saf
    below_saf, _above_saf = split(mr, saf_splitter)
    _below_stp, above_stp = split(below_saf, stp_splitter)
    below_hf, above_hf = split(above_stp, hf_splitter)

    if write_river_section is not None:
        river_section = gpd.GeoDataFrame(geometry=[below_hf, above_hf],
                                         crs=utm_15n)
        river_section.to_file(write_river_section)

    #### Interpolate position
    ## 12 -> 13.4 below HF -> stp
    ## direction starts at stp
    len_below = below_hf.length
    dist_arr_below = [0, len_below]
    time_arr_below = [13.4, 12]

    ## 12 -> 0 above HF -> SAF
    ## direction starts at hf
    len_above = above_hf.length
    dist_arr_above = [0, len_above]
    time_arr_above = [12, 0]
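
    ## np.interp maps the along-channel distance of interp_point onto the
    ## (distance, date) breakpoints above; e.g. a point halfway up the
    ## above-HF reach interpolates to a date of 6 (same units as time_arr_*)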

    ## Determine if interp point is above or below hf
    if above_hf.distance(interp_point) < below_hf.distance(interp_point):
        date = np.interp(above_hf.project(interp_point), dist_arr_above,
                         time_arr_above)
    else:
        date = np.interp(below_hf.project(interp_point), dist_arr_below,
                         time_arr_below)

    return date


### Debug plotting
#fig, ax = plt.subplots(1,1,figsize=(8,8))
#splitter = gpd.GeoDataFrame(geometry=[hf_splitter, saf_splitter, stp_splitter])
#splits = gpd.GeoDataFrame(geometry=[above_hf])
##tmr = gpd.GeoDataFrame(geometry=[r for r in mr], crs=utm_15n)
#mr_df['rand'] = np.random.randint(1, 6, mr_df.shape[0])
#splits['rand'] = np.random.randint(1, 6, splits.shape[0])
#
##mr_df.plot(column='rand', ax=ax)
#splitter.plot(ax=ax)
#splits.plot(column='rand', ax=ax)
#points.plot(ax=ax)
#test = above_hf.interpolate(100)
#t = gpd.GeoDataFrame(geometry=[test])
#t.plot(ax=ax)
##pts_snapped.plot(ax=ax, color='blue')
        # add the edge to the existing graph
        graphs[i].add_edge(edge[0], edge[1])
    else:
        # we couldn't find a graph that this edge should belong to
        new_graph = nx.Graph()
        new_graph.add_edge(edge[0], edge[1])
        graphs.append(new_graph)


poly_lines = []
for graph in graphs:
    if graph.number_of_edges() > 0:
        line_segments = []
        for edge in graph.edges():
            line_segments.append(LineString([edge[0], edge[1]]))
        multi_line = MultiLineString(line_segments)
        poly_line = ops.linemerge(multi_line)
        poly_lines.append(poly_line)
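
# A minimal self-contained sketch of the merge step above (shapely only;
# the segment coordinates are hypothetical):
# from shapely.geometry import LineString, MultiLineString
# from shapely import ops
# segs = [LineString([(0, 0), (1, 0)]), LineString([(1, 0), (2, 1)])]
# ops.linemerge(MultiLineString(segs))  # -> LINESTRING (0 0, 1 0, 2 1)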

"""
Simplify geometry of roads
"""

new_poly_lines = []
for poly_line in poly_lines:
    new_poly_lines.append(smooth(poly_line))

"""
Decompose the poly_lines into line_segments
"""
line_segments = []
Example #25
def floodplain_connectivity(vbet_network: Path,
                            vbet_polygon: Path,
                            roads: Path,
                            railroads: Path,
                            output_dir: Path,
                            debug_gpkg: Path = None):
    """[summary]

    Args:
        vbet_network (Path): Filtered Flowline network used to generate VBET. Final selection is based on this intersection.
        vbet_polygon (Path): Vbet polygons with clipped NHD Catchments
        roads (Path): Road network
        railroads (Path): Railroad network
        output_dir (Path): Output directory; floodplain polygons are written to 'fconn.gpkg/outputs' within it
        debug_gpkg (Path, optional): geopackage for saving debug layers (may substantially increase processing time). Defaults to None.
    """

    log = Logger('Floodplain Connectivity')
    log.info("Starting Floodplain Connectivity Script")

    out_polygon = os.path.join(output_dir, 'fconn.gpkg/outputs')

    # Prepare vbet and catchments
    geom_vbet = get_geometry_unary_union(vbet_polygon)
    geoms_raw_vbet = list(load_geometries(vbet_polygon, None).values())
    listgeoms = []
    for geom in geoms_raw_vbet:
        if geom.geom_type == "MultiPolygon":
            for g in geom:
                listgeoms.append(g)
        else:
            listgeoms.append(geom)
    geoms_vbet = MultiPolygon(listgeoms)

    # Clip Transportation Network by VBET
    log.info("Merging Transportation Networks")
    # merge_feature_classes([roads, railroads], geom_vbet, os.path.join(debug_gpkg, "Transportation")) TODO: error when calling this method
    geom_roads = get_geometry_unary_union(roads)
    geom_railroads = get_geometry_unary_union(railroads)
    geom_transportation = geom_roads.union(
        geom_railroads) if geom_railroads is not None else geom_roads
    log.info("Clipping Transportation Network by VBET")
    geom_transportation_clipped = geom_vbet.intersection(geom_transportation)
    if debug_gpkg:
        quicksave(debug_gpkg, "Clipped_Transportation",
                  geom_transportation_clipped, ogr.wkbLineString)

    # Split Valley Edges at transportation intersections
    log.info("Splitting Valley Edges at transportation network intersections")
    geom_vbet_edges = MultiLineString(
        [geom.exterior for geom in geoms_vbet] +
        [g for geom in geoms_vbet for g in geom.interiors])
    geom_vbet_interior_pts = MultiPoint([
        Polygon(g).representative_point() for geom in geoms_vbet
        for g in geom.interiors
    ])

    if debug_gpkg:
        quicksave(debug_gpkg, "Valley_Edges_Raw", geom_vbet_edges,
                  ogr.wkbLineString)

    vbet_splitpoints = []
    vbet_splitlines = []
    counter = 0
    for geom_edge in geom_vbet_edges:
        counter += 1
        log.info('Splitting edge features {}/{}'.format(
            counter, len(geom_vbet_edges)))
        if geom_edge.is_valid:
            if not geom_edge.intersects(geom_transportation):
                vbet_splitlines = vbet_splitlines + [geom_edge]
                continue
            pts = geom_transportation.intersection(geom_edge)
            if pts.is_empty:
                vbet_splitlines = vbet_splitlines + [geom_edge]
                continue
            if isinstance(pts, Point):
                pts = [pts]
            geom_boundaries = [geom_edge]

            progbar = ProgressBar(len(geom_boundaries), 50, "Processing")
            counter = 0
            for pt in pts:
                # TODO: I tried to break this out but I'm not sure
                new_boundaries = []
                for line in geom_boundaries:
                    if line is not None:
                        split_line = line_splitter(line, pt)
                        progbar.total += len(split_line)
                        for new_line in split_line:
                            counter += 1
                            progbar.update(counter)
                            if new_line is not None:
                                new_boundaries.append(new_line)
                geom_boundaries = new_boundaries
                # TODO: Not sure this is having the intended effect
                # geom_boundaries = [new_line for line in geom_boundaries if line is not None for new_line in line_splitter(line, pt) if new_line is not None]
            progbar.finish()
            vbet_splitlines = vbet_splitlines + geom_boundaries
            vbet_splitpoints = vbet_splitpoints + [pt for pt in pts]

    if debug_gpkg:
        quicksave(debug_gpkg, "Split_Points", vbet_splitpoints, ogr.wkbPoint)
        quicksave(debug_gpkg, "Valley_Edges_Split", vbet_splitlines,
                  ogr.wkbLineString)

    # Generate Polygons from lines
    log.info("Generating Floodplain Polygons")
    geom_lines = unary_union(
        vbet_splitlines + [geom_tc for geom_tc in geom_transportation_clipped])
    geoms_areas = [
        geom for geom in polygonize(geom_lines)
        if not any(geom.contains(pt) for pt in geom_vbet_interior_pts)
    ]
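
    # Note: polygonize() only closes rings from properly noded lines, which
    # is why the valley edges were split at every transportation crossing
    # before being unioned with the clipped transportation network above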

    if debug_gpkg:
        quicksave(debug_gpkg, "Split_Polygons", geoms_areas, ogr.wkbPolygon)

    # Select Polygons by flowline intersection
    log.info("Selecting connected floodplains")
    geom_vbet_network = get_geometry_unary_union(vbet_network)
    geoms_connected = []
    geoms_disconnected = []
    progbar = ProgressBar(len(geoms_areas), 50, "Running polygon selection")
    counter = 0
    for geom in geoms_areas:
        progbar.update(counter)
        counter += 1
        if geom_vbet_network.intersects(geom):
            geoms_connected.append(geom)
        else:
            geoms_disconnected.append(geom)

    log.info("Union connected floodplains")
    geoms_connected_output = [
        geom for geom in list(unary_union(geoms_connected))
    ]
    geoms_disconnected_output = [
        geom for geom in list(unary_union(geoms_disconnected))
    ]

    # Save Outputs
    log.info("Save Floodplain Output")
    with GeopackageLayer(out_polygon, write=True) as out_lyr:
        out_lyr.create_layer(ogr.wkbPolygon, epsg=4326)
        out_lyr.create_field("Connected", ogr.OFTInteger)
        progbar = ProgressBar(
            len(geoms_connected_output) + len(geoms_disconnected_output), 50,
            f"saving {out_lyr.ogr_layer_name} features")
        counter = 0
        for shape in geoms_connected_output:
            progbar.update(counter)
            counter += 1
            out_lyr.create_feature(shape, attributes={"Connected": 1})
        for shape in geoms_disconnected_output:
            progbar.update(counter)
            counter += 1
            out_lyr.create_feature(shape, attributes={"Connected": 0})
Example #26
def getGreatCircleFromFloats(starty, startx, endy, endx, geod=Geodesic.WGS84):
    """
    Utility function for calculating great circle routes. Uses
    floats for inputs; these are expected to be derived 
    from WGS84 coordinates because we've initialised geod
    as WGS84 Geodesic object.
    
    starty -- the starting latitude point in WGS84
    startx -- the starting longitude point in WGS84
    endy -- the ending latitude point in WGS84
    endx -- the ending longitude point in WGS84
    geod -- a geodesic object (default: Geodesic.WGS84)
    """
    # If we couldn't get useable coordinates
    # then just return None since we can't
    # actually draw the circle.
    if np.isnan(starty) or np.isnan(startx) or np.isnan(endy) or np.isnan(
            endx):
        return None

    # We don't particularly care about direction, but having
    # startx be less than endx helps to ensure that the line
    # gets drawn consistently. Otherwise you might see most
    # of the lines following a normal path and then one that
    # runs much further to the East or West because it should
    # be wrapping around and isn't.
    if (startx > endx):
        (tmpx, tmpy) = (endx, endy)
        (endx, endy) = (startx, starty)
        (startx, starty) = (tmpx, tmpy)

    # The bulk of the work is simply drawing this inverse
    # line now that we've got the coordinates sorted out.
    l = geod.InverseLine(
        float(starty), float(startx), float(endy), float(endx),
        Geodesic.LATITUDE | Geodesic.LONGITUDE | Geodesic.LONG_UNROLL)

    # Initialise the arc information
    da = 1
    n = int(math.ceil(l.a13 / da))
    da = l.a13 / n

    # Try to deal with break at -180 (ca. int'l dateline)
    ml = list()  # list of line lists
    line = list()  # empty list for a new line
    lastLon = None  # track where we jump across the dateline

    for i in range(n + 1):
        a = da * i
        g = l.ArcPosition(
            a, Geodesic.LATITUDE | Geodesic.LONGITUDE | Geodesic.LONG_UNROLL)

        if view == 'Default':
            if lastLon is not None and ((g['lon2'] <= -180 and lastLon >= -180)
                                        or
                                        (g['lon2'] >= 180 and lastLon <= 180)):
                if len(line) > 1:
                    #print("Breaking line at: " + str(line))
                    ml.append(line)
                    line = list()
                    #print("--- break ---")
        elif view == 'Alternate':
            if lastLon is not None and ((g['lon2'] <= -330 and lastLon >= -330)
                                        or
                                        (g['lon2'] >= 30 and lastLon <= 30)):
                if len(line) > 1:
                    #print("Breaking line at: " + str(line))
                    ml.append(line)
                    line = list()
                    #print("--- break ---")

        # Copy to a new variable to make it
        # easier to see what's happening
        x = g['lon2']
        y = g['lat2']

        if view == 'Default':  # Standard Robinson projection centered on Atlantic
            if x < -180:
                x = 180 - (abs(x) - 180)
            elif x > 180:
                x = -180 + (abs(x) - 180)
        elif view == 'Alternate':  # Robinson projection centered on Pacific
            if x < -330:
                x = 30 - (abs(x) - 330)
            elif x > 30:
                x = -330 + (abs(x) - 30)
        line.append(Point(x, y))
        lastLon = g['lon2']
        #print "{:.5f} {:.5f}".format(g['lat2'], g['lon2'])

    # Append the last line that was running
    # unless it's a single point, in which
    # case we want to stick it on to the
    # previous multi-line string
    if len(line) > 1:
        ml.append(line)

    # Now assemble this into a MultiLineString
    # object that can be written to a shapefile
    for i in range(len(ml)):
        ml[i] = LineString(ml[i])
    return MultiLineString(ml)
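
# A minimal usage sketch (hypothetical coordinates; assumes the module-level
# `view` variable referenced above is set to 'Default' or 'Alternate'):
# route = getGreatCircleFromFloats(40.64, -73.78, 51.47, -0.45)  # JFK -> LHR
# route is a MultiLineString, split wherever the path crosses the map edge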
Example #27
def airspace_image(cache_key, airspace_id):
    if not mapscript_available:
        abort(404)

    # get information from cache...
    infringements = cache.get('upload_airspace_infringements_' + cache_key)
    flight_path = cache.get('upload_airspace_flight_path_' + cache_key)

    # abort if invalid cache key
    if not infringements \
       or not flight_path:
        abort(404)

    # Convert the coordinate into a list of tuples
    coordinates = [(c.location['longitude'], c.location['latitude']) for c in flight_path]
    # Create a shapely LineString object from the coordinates
    linestring = LineString(coordinates)
    # Save the new path as WKB
    locations = from_shape(linestring, srid=4326)

    highlight_locations = []
    extent_epsg4326 = [180, 85.05112878, -180, -85.05112878]

    for period in infringements[airspace_id]:
        # Convert the coordinate into a list of tuples
        coordinates = [(c['location']['longitude'], c['location']['latitude']) for c in period]

        # Create a shapely LineString object from the coordinates
        if len(coordinates) == 1:
            # a LineString must contain at least two points...
            linestring = LineString([coordinates[0], coordinates[0]])
        else:
            linestring = LineString(coordinates)

        highlight_locations.append(linestring)

        # gather extent
        (minx, miny, maxx, maxy) = linestring.bounds

        extent_epsg4326[0] = min(extent_epsg4326[0], minx)
        extent_epsg4326[1] = min(extent_epsg4326[1], miny)
        extent_epsg4326[2] = max(extent_epsg4326[2], maxx)
        extent_epsg4326[3] = max(extent_epsg4326[3], maxy)

    # Save the new path as WKB
    highlight_multilinestring = from_shape(MultiLineString(highlight_locations), srid=4326)

    # increase extent by factor 1.05
    width = abs(extent_epsg4326[0] - extent_epsg4326[2])
    height = abs(extent_epsg4326[1] - extent_epsg4326[3])

    center_x = (extent_epsg4326[0] + extent_epsg4326[2]) / 2
    center_y = (extent_epsg4326[1] + extent_epsg4326[3]) / 2

    extent_epsg4326[0] = center_x - width / 2 * 1.05
    extent_epsg4326[1] = center_y - height / 2 * 1.05
    extent_epsg4326[2] = center_x + width / 2 * 1.05
    extent_epsg4326[3] = center_y + height / 2 * 1.05

    # minimum extent should be 0.3 deg
    width = abs(extent_epsg4326[0] - extent_epsg4326[2])
    height = abs(extent_epsg4326[1] - extent_epsg4326[3])

    if width < 0.3:
        extent_epsg4326[0] = center_x - 0.15
        extent_epsg4326[2] = center_x + 0.15

    if height < 0.3:
        extent_epsg4326[1] = center_y - 0.15
        extent_epsg4326[3] = center_y + 0.15

    # convert extent from EPSG4326 to EPSG3857
    epsg4326 = pyproj.Proj(init='epsg:4326')
    epsg3857 = pyproj.Proj(init='epsg:3857')

    x1, y1 = pyproj.transform(epsg4326, epsg3857, extent_epsg4326[0], extent_epsg4326[1])
    x2, y2 = pyproj.transform(epsg4326, epsg3857, extent_epsg4326[2], extent_epsg4326[3])

    extent_epsg3857 = [x1, y1, x2, y2]

    # load basemap and set size + extent
    basemap_path = os.path.join(current_app.config.get('SKYLINES_MAPSERVER_PATH'), 'basemap.map')
    map_object = mapscript.mapObj(basemap_path)
    map_object.setSize(400, 400)
    map_object.setExtent(extent_epsg3857[0], extent_epsg3857[1], extent_epsg3857[2], extent_epsg3857[3])

    # enable airspace and airports layers
    num_layers = map_object.numlayers
    for i in range(num_layers):
        layer = map_object.getLayer(i)

        if layer.group == 'Airports':
            layer.status = mapscript.MS_ON

        if layer.group == 'Airspace':
            layer.status = mapscript.MS_ON

    # get flights layer
    flights_layer = map_object.getLayerByName('Flights')
    highlight_layer = map_object.getLayerByName('Flights_Highlight')

    # set sql query for blue flight
    one = literal_column('1 as flight_id')
    flight_query = db.session.query(locations.label('flight_geometry'), one)

    flights_layer.data = 'flight_geometry FROM (' + query_to_sql(flight_query) + ')' + \
                         ' AS foo USING UNIQUE flight_id USING SRID=4326'

    # set sql query for highlighted linestrings
    highlighted_query = db.session.query(highlight_multilinestring.label('flight_geometry'), one)

    highlight_layer.data = 'flight_geometry FROM (' + query_to_sql(highlighted_query) + ')' + \
                           ' AS foo USING UNIQUE flight_id USING SRID=4326'

    highlight_layer.status = mapscript.MS_ON

    # get osm layer and set WMS url
    osm_layer = map_object.getLayerByName('OSM')
    osm_layer.connection = current_app.config.get('SKYLINES_MAP_TILE_URL') + \
        '/service?'

    # draw map
    map_image = map_object.draw()

    # get image
    mapscript.msIO_installStdoutToBuffer()
    map_image.write()
    content = mapscript.msIO_getStdoutBufferBytes()

    # return to client
    resp = make_response(content)
    resp.headers['Content-type'] = map_image.format.mimetype
    return resp
Example #28
def get_centerline(geom,
                   segmentize_maxlen=0.5,
                   max_points=3000,
                   simplification=0.05,
                   smooth_sigma=5):
    """
    Return centerline from geometry.

    Parameters:
    -----------
    geom : shapely Polygon or MultiPolygon
    segmentize_maxlen : Maximum segment length for polygon borders.
        (default: 0.5)
    max_points : Number of points per geometry allowed before simplifying.
        (default: 3000)
    simplification : Simplification threshold.
        (default: 0.05)
    smooth_sigma : Smoothness of the output centerlines.
        (default: 5)

    Returns:
    --------
    geometry : LineString or MultiLineString

    Raises:
    -------
    CenterlineError : if centerline cannot be extracted from Polygon
    TypeError : if input geometry is not Polygon or MultiPolygon

    """
    logger.debug("geometry type %s", geom.geom_type)

    if geom.geom_type == "Polygon":
        # segmentized Polygon outline
        outline = _segmentize(geom.exterior, segmentize_maxlen)
        logger.debug("outline: %s", outline)

        # simplify segmentized geometry if necessary and get points
        outline_points = outline.coords
        simplification_updated = simplification
        while len(outline_points) > max_points:
            # if geometry is too large, apply simplification until geometry
            # is simplified enough (indicated by the "max_points" value)
            simplification_updated += simplification
            outline_points = outline.simplify(simplification_updated).coords
        logger.debug("simplification used: %s", simplification_updated)
        logger.debug("simplified points: %s", MultiPoint(outline_points))

        # calculate Voronoi diagram and convert to graph but only use points
        # from within the original polygon
        vor = Voronoi(outline_points)
        graph = _graph_from_voronoi(vor, geom)
        logger.debug("voronoi diagram: %s",
                     _multilinestring_from_voronoi(vor, geom))

        # determine longest path between all end nodes from graph
        end_nodes = _get_end_nodes(graph)
        if len(end_nodes) < 2:
            logger.debug("Polygon has too few points")
            raise CenterlineError("Polygon has too few points")
        logger.debug("get longest path from %s end nodes", len(end_nodes))
        longest_paths = _get_longest_paths(end_nodes, graph)
        if not longest_paths:
            logger.debug("no paths found between end nodes")
            raise CenterlineError("no paths found between end nodes")
        if logger.getEffectiveLevel() <= 10:
            logger.debug("longest paths:")
            for path in longest_paths:
                logger.debug(LineString(vor.vertices[path]))

        # get least curved path from the five longest paths, smooth and
        # return as LineString
        centerline = _smooth_linestring(
            LineString(vor.vertices[_get_least_curved_path(
                longest_paths, vor.vertices)]), smooth_sigma)
        logger.debug("centerline: %s", centerline)
        logger.debug("return linestring")
        return centerline

    elif geom.geom_type == "MultiPolygon":
        logger.debug("MultiPolygon found with %s sub-geometries", len(geom))
        # get centerline for each part Polygon and combine into MultiLineString
        sub_centerlines = []
        for subgeom in geom:
            try:
                sub_centerline = get_centerline(subgeom, segmentize_maxlen,
                                                max_points, simplification,
                                                smooth_sigma)
                sub_centerlines.append(sub_centerline)
            except CenterlineError as e:
                logger.debug("subgeometry error: %s", e)
        # for MultiPolygon, only raise CenterlineError if all subgeometries fail
        if sub_centerlines:
            return MultiLineString(sub_centerlines)
        else:
            raise CenterlineError("all subgeometries failed")

    else:
        raise TypeError(
            "Geometry type must be Polygon or MultiPolygon, not %s" %
            geom.geom_type)
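
# A minimal usage sketch (assumes the private helpers referenced above are
# importable alongside get_centerline; the rectangle is hypothetical):
# from shapely.geometry import Polygon
# poly = Polygon([(0, 0), (10, 0), (10, 2), (0, 2)])
# get_centerline(poly).geom_type  # -> 'LineString' tracing the long axis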
Example #29
def main():
    """Go Main, please"""
    pgconn = get_dbconn("postgis")
    cursor = pgconn.cursor()
    cursor.execute("""
    DELETE from roads_current
    """)
    print("removed %s rows from roads_current" % (cursor.rowcount, ))
    req = requests.get(URI, timeout=30)
    jobj = req.json()
    archive_begin = "2018-10-09 00:00"
    print("adding %s rows to roads_base" % (len(jobj["features"]), ))
    for feat in jobj["features"]:
        props = feat["attributes"]
        # Geometry is [[pt]] and we only have single segments
        path = MultiLineString([LineString(feat["geometry"]["paths"][0])])
        # segid is defined by the database insert
        major = props["ROUTE_NAME"]
        minor = props["NAMEID"].split(":", 1)[1]
        (typ, num) = major.replace("-", " ").split()
        int1 = num if typ == "I" else None
        us1 = num if typ == "US" else None
        st1 = num if typ == "IA" else None
        if major == "Airline Highway":
            num = 0
        sys_id = props["ROUTE_RANK"]
        longname = props["LONG_NAME"]
        geom = ("ST_Transform(ST_SetSrid(ST_GeomFromText('%s'), 3857), 26915)"
                ) % (path.wkt)
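        # the incoming paths are in web mercator, so the WKT is tagged with
        # SRID 3857 and reprojected to UTM zone 15N (EPSG:26915) for storage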
        idot_id = props["SEGMENT_ID"]
        cursor.execute(
            """
            INSERT into roads_base (major, minor, us1, st1, int1, type,
            longname, geom, idot_id, archive_begin)
            VALUES (%s, %s, %s, %s, %s, %s, %s,
            """ + geom + """, %s, %s) RETURNING segid
        """,
            (
                major[:10],
                minor,
                us1,
                st1,
                int1,
                sys_id,
                longname,
                idot_id,
                archive_begin,
            ),
        )
        segid = cursor.fetchone()[0]
        cursor.execute(
            """
            UPDATE roads_base
            SET simple_geom = ST_Simplify(geom, 0.01) WHERE segid = %s
        """,
            (segid, ),
        )
        # Figure out which WFO this segment is in...
        cursor.execute(
            """
            SELECT u.wfo,
            ST_Distance(u.geom, ST_Transform(b.geom, 4326))
            from ugcs u, roads_base b WHERE
            substr(ugc, 1, 2) = 'IA' and b.segid = %s
            and u.end_ts is null ORDER by ST_Distance ASC
        """,
            (segid, ),
        )
        wfo = cursor.fetchone()[0]
        cursor.execute(
            """
            UPDATE roads_base SET wfo = %s WHERE segid = %s
        """,
            (wfo, segid),
        )
        # Add a roads_current entry, 85 is a hack
        cursor.execute(
            """
            INSERT into roads_current(segid, valid, cond_code)
            VALUES (%s, %s, 85)
        """,
            (segid, archive_begin),
        )
    cursor.close()
    pgconn.commit()
Example #30
    def _add_hatching(self, orientation=HATCHING_ORIENTATION_45, distance=2, wiggle=0, bounding_box=None):

        minx, miny, maxx, maxy = 0, 0, self.dimensions[0], self.dimensions[1]

        if bounding_box is not None:
            minx, miny, maxx, maxy = bounding_box
            minx = int(minx/distance) * distance
            miny = int(miny/distance) * distance
            maxx = (int(maxx/distance) + 1) * distance
            maxy = (int(maxy/distance) + 1) * distance

        height = maxy-miny
        width = maxx-minx

        num_lines = (width + height)/float(distance)

        if orientation == self.HATCHING_ORIENTATION_HORIZONTAL:
            # horizontal lines advance in y, so the count depends on height
            num_lines = height/float(distance)

        if orientation == self.HATCHING_ORIENTATION_VERTICAL:
            # vertical lines advance in x, so the count depends on width
            num_lines = width/float(distance)

        north = [[minx, miny], [maxx, miny]]
        south = [[minx, maxy], [maxx, maxy]]
        west  = [[minx, miny], [minx, maxy]]
        east  = [[maxx, miny], [maxx, maxy]]

        hatchlines = []

        wiggle_range = [-wiggle, +wiggle]

        for i in range(0, int(num_lines)):

            random_error_1 = 0
            random_error_2 = 0

            if wiggle > 0:
                random_error_1 = random.uniform(*wiggle_range)
                random_error_2 = random.uniform(*wiggle_range)

            if orientation == self.HATCHING_ORIENTATION_45:
                x1 = minx
                y1 = miny + i * distance
                x2 = minx + i * distance
                y2 = miny
                y1 += random_error_1
                x2 += random_error_2
            elif orientation == self.HATCHING_ORIENTATION_45_REV:
                x1 = maxx
                y1 = miny + i * distance
                x2 = maxx - i * distance
                y2 = miny
                y1 += random_error_1
                x2 += random_error_2
            elif orientation == self.HATCHING_ORIENTATION_VERTICAL:
                x1 = minx + i * distance
                y1 = miny
                x2 = x1
                y2 = maxy
                x1 += random_error_1
                x2 += random_error_2
            elif orientation == self.HATCHING_ORIENTATION_HORIZONTAL:
                x1 = minx
                y1 = miny + i * distance
                x2 = maxx
                y2 = y1
                y1 += random_error_1
                y2 += random_error_2

            else:
                raise Exception("unknown hatching orientation type: {}".format(orientation))

            hatching_line = [[x1, y1], [x2, y2]]
            cropped_line = []

            north_intersect = SvgWriter._line_intersection(hatching_line, north)
            south_intersect = SvgWriter._line_intersection(hatching_line, south)
            west_intersect = SvgWriter._line_intersection(hatching_line, west)
            east_intersect = SvgWriter._line_intersection(hatching_line, east)

            if west_intersect is not None:
                cropped_line.append(west_intersect)

            if south_intersect is not None:
                cropped_line.append(south_intersect)

            if north_intersect is not None:
                cropped_line.append(north_intersect)

            if east_intersect is not None:
                cropped_line.append(east_intersect)

            if len(cropped_line) == 2:
                hatchlines.append(LineString(cropped_line))
            elif len(cropped_line) > 2:
                hatchlines.append(LineString([cropped_line[0], cropped_line[2]]))

        # if len(hatchlines) == 0:
        #     raise Exception("no hatchlines created for distance {}".format(distance))

        return MultiLineString(hatchlines)
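
# A minimal usage sketch (hypothetical values; assumes an SvgWriter instance
# `writer` with writer.dimensions set):
# hatch = writer._add_hatching(orientation=SvgWriter.HATCHING_ORIENTATION_45,
#                              distance=2, bounding_box=(0, 0, 20, 20))
# hatch is a MultiLineString of diagonal hatch lines clipped to the box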