Example #1
def generate_rbox(im_size, polys, tags):
    """
    score map is (128, 128, 1) with the shrunk polys filled in
    poly mask is (128, 128, 1) with a different color index per poly
    geo map is (128, 128, 5) with the 4 distances to the rectangle
    edges and the rotation angle
    """
    h, w = im_size
    poly_mask = np.zeros((h, w), dtype=np.uint8)
    score_map = np.zeros((h, w), dtype=np.uint8)
    geo_map = np.zeros((h, w, 5), dtype=np.float32)
    # mask used during training, to ignore some hard areas
    training_mask = np.ones((h, w), dtype=np.uint8)
    for poly_idx, poly_tag in enumerate(zip(polys, tags)):
        poly = poly_tag[0]
        tag = poly_tag[1]
        poly = np.array(poly)
        tag = np.array(tag)
        r = [None, None, None, None]
        for i in range(4):
            r[i] = min(np.linalg.norm(poly[i] - poly[(i + 1) % 4]),
                       np.linalg.norm(poly[i] - poly[(i - 1) % 4]))
        # score map
        shrinked_poly = shrink_poly(poly.copy(),
                                    r).astype(np.int32)[np.newaxis, :, :]
        cv2.fillPoly(score_map, shrinked_poly, 1)

        # use different color to draw poly mask
        cv2.fillPoly(poly_mask, shrinked_poly, poly_idx + 1)
        # if the poly is too small, then ignore it during training
        poly_h = min(np.linalg.norm(poly[0] - poly[3]),
                     np.linalg.norm(poly[1] - poly[2]))
        poly_w = min(np.linalg.norm(poly[0] - poly[1]),
                     np.linalg.norm(poly[2] - poly[3]))
        # if min(poly_h, poly_w) < FLAGS.min_text_size:
        if min(poly_h, poly_w) < 10:
            cv2.fillPoly(training_mask,
                         poly.astype(np.int32)[np.newaxis, :, :], 0)
        if tag:
            cv2.fillPoly(training_mask,
                         poly.astype(np.int32)[np.newaxis, :, :], 0)

        xy_in_poly = np.argwhere(poly_mask == (poly_idx + 1))
        # if geometry == 'RBOX':
        # generate a parallelogram for any combination of two vertices
        fitted_parallelograms = []
        for i in range(4):
            p0 = poly[i]
            p1 = poly[(i + 1) % 4]
            p2 = poly[(i + 2) % 4]
            p3 = poly[(i + 3) % 4]

            #fit_line([x1, x2], [y1, y2]) returns [k, -1, b], i.e. the line y = k*x + b
            edge = fit_line([p0[0], p1[0]], [p0[1], p1[1]])  #p0, p1
            backward_edge = fit_line([p0[0], p3[0]], [p0[1], p3[1]])  #p0, p3
            forward_edge = fit_line([p1[0], p2[0]], [p1[1], p2[1]])  #p1, p2

            #place the parallel opposite edge through the farther of p2 and p3
            if point_dist_to_line(p0, p1, p2) > point_dist_to_line(p0, p1, p3):
                # the parallel line passes through p2
                if edge[1] == 0:  #vertical
                    edge_opposite = [1, 0, -p2[0]]
                else:
                    edge_opposite = [edge[0], -1, p2[1] - edge[0] * p2[0]]
            else:
                # it passes through p3
                if edge[1] == 0:
                    edge_opposite = [1, 0, -p3[0]]
                else:
                    edge_opposite = [edge[0], -1, p3[1] - edge[0] * p3[0]]
            # move forward edge
            new_p0 = p0
            new_p1 = p1
            new_p2 = p2
            new_p3 = p3
            new_p2 = line_cross_point(forward_edge, edge_opposite)
            if point_dist_to_line(p1, new_p2, p0) > point_dist_to_line(
                    p1, new_p2, p3):
                # across p0
                if forward_edge[1] == 0:
                    forward_opposite = [1, 0, -p0[0]]
                else:
                    forward_opposite = [
                        forward_edge[0], -1, p0[1] - forward_edge[0] * p0[0]
                    ]
            else:
                # across p3
                if forward_edge[1] == 0:
                    forward_opposite = [1, 0, -p3[0]]
                else:
                    forward_opposite = [
                        forward_edge[0], -1, p3[1] - forward_edge[0] * p3[0]
                    ]
            new_p0 = line_cross_point(forward_opposite, edge)
            new_p3 = line_cross_point(forward_opposite, edge_opposite)
            fitted_parallelograms.append(
                [new_p0, new_p1, new_p2, new_p3, new_p0])
            # or move backward edge
            new_p0 = p0
            new_p1 = p1
            new_p2 = p2
            new_p3 = p3
            new_p3 = line_cross_point(backward_edge, edge_opposite)
            if point_dist_to_line(p0, p3, p1) > point_dist_to_line(p0, p3, p2):
                # across p1
                if backward_edge[1] == 0:
                    backward_opposite = [1, 0, -p1[0]]
                else:
                    backward_opposite = [
                        backward_edge[0], -1, p1[1] - backward_edge[0] * p1[0]
                    ]
            else:
                # across p2
                if backward_edge[1] == 0:
                    backward_opposite = [1, 0, -p2[0]]
                else:
                    backward_opposite = [
                        backward_edge[0], -1, p2[1] - backward_edge[0] * p2[0]
                    ]
            new_p1 = line_cross_point(backward_opposite, edge)
            new_p2 = line_cross_point(backward_opposite, edge_opposite)
            fitted_parallelograms.append(
                [new_p0, new_p1, new_p2, new_p3, new_p0])

        areas = [Polygon(t).area for t in fitted_parallelograms]
        parallelogram = np.array(fitted_parallelograms[np.argmin(areas)][:-1],
                                 dtype=np.float32)
        # sort this polygon
        parallelogram_coord_sum = np.sum(parallelogram, axis=1)
        min_coord_idx = np.argmin(parallelogram_coord_sum)
        parallelogram = parallelogram[[
            min_coord_idx, (min_coord_idx + 1) % 4, (min_coord_idx + 2) % 4,
            (min_coord_idx + 3) % 4
        ]]

        rectange = rectangle_from_parallelogram(parallelogram)
        rectange, rotate_angle = sort_rectangle(rectange)
        #print('parallel {} rectangle {}'.format(parallelogram, rectange))
        p0_rect, p1_rect, p2_rect, p3_rect = rectange
        # pure-Python version of the geo map fill; the compiled gen_geo_map below does the same
        """
        for y, x in xy_in_poly:
            point = np.array([x, y], dtype=np.float32)
            # top
            geo_map[y, x, 0] = point_dist_to_line(p0_rect, p1_rect, point)
            # right
            geo_map[y, x, 1] = point_dist_to_line(p1_rect, p2_rect, point)
            # down
            geo_map[y, x, 2] = point_dist_to_line(p2_rect, p3_rect, point)
            # left
            geo_map[y, x, 3] = point_dist_to_line(p3_rect, p0_rect, point)
            # angle
            geo_map[y, x, 4] = rotate_angle
        """
        gen_geo_map.gen_geo_map(geo_map, xy_in_poly, rectange, rotate_angle)

    ### summary
    # score_map: 1 inside the shrunk polys
    # geo_map: edge distances and rotation angle, aligned with score_map
    # training_mask: 0 where polys are too small or tagged as hard

    return score_map, geo_map, training_mask
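
A minimal usage sketch (values are illustrative; shrink_poly, fit_line and the
other helpers are assumed to come from the surrounding EAST text-detector
module, and gen_geo_map from its compiled extension):

import numpy as np

polys = [np.array([[10, 10], [60, 10], [60, 30], [10, 30]], dtype=np.float32)]
tags = [False]  # False = normal text region, True = ignore during training
score_map, geo_map, training_mask = generate_rbox((128, 128), polys, tags)
print(score_map.sum(), geo_map.shape, training_mask.min())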
Example #2
def read_shp(filename, encoding=None, type=False, strings_to_float=True):
    """Read shapefile to dataframe w/ geometry.
    if the reading fails, provid a type among ('polygon', 'polyline', 'point')

    Args:
        filename: ESRI shapefile name to be read  (without .shp extension)

    Returns:
        pandas DataFrame with column geometry, containing individual shapely
        Geometry objects (i.e. Point, LineString, Polygon) depending on
        the shapefiles original shape type

    """
    sr = shapefile.Reader(filename)

    cols = sr.fields[:]  # [:] = duplicate field list
    if cols[0][0] == 'DeletionFlag':
        cols.pop(0)
    cols = [col[0] for col in cols]  # extract field name only
    cols.append('geometry')

    records = [row for row in sr.iterRecords()]

    if sr.shapeType == shapefile.POLYGON or type == 'polygon':
        geometries = [
            Polygon(shape.points)
            if len(shape.points) > 2 else np.NaN  # invalid geometry
            for shape in sr.iterShapes()
        ]
    elif sr.shapeType == shapefile.POLYLINE or type == 'polyline':
        geometries = [LineString(shape.points) for shape in sr.iterShapes()]
    elif sr.shapeType == shapefile.POINT or type == 'point':
        geometries = [Point(*shape.points[0]) for shape in sr.iterShapes()]
    else:
        raise NotImplementedError

    data = [r + [g] for r, g in zip(records, geometries)]

    df = pd.DataFrame(data, columns=cols)

    if strings_to_float:
        for col in df.columns:
            try:
                # the column is a string of ints; keep it that way
                df[col].astype(int)  # only checks parseability, result unused

            except (ValueError, TypeError, OverflowError):
                # invalid literal for int() with base 10:
                try:
                    df[col] = df[col].astype(float)
                except (ValueError, TypeError):
                    pass

    if np.NaN in geometries:
        # drop invalid geometries
        df = df.dropna(subset=['geometry'])
        num_skipped = len(geometries) - len(df)
        warnings.warn('Skipped {} invalid geometrie(s).'.format(num_skipped))
    if encoding:
        return pandasdbf.convert_bytes_to_string(df,
                                                 debug=False,
                                                 encoding=encoding)
    return df
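
A hedged usage sketch ('parcels' is an illustrative shapefile name; pandasdbf
is only needed when an encoding is passed):

df = read_shp('parcels')   # reads parcels.shp / parcels.dbf / parcels.shx
print(df['geometry'].iloc[0])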
Example #3
    def eval_points(self):
        try:
            while not self.__stopped:
                if not self.__data_available:
                    time.sleep(0.1)
                    continue

                with self.__vals_lock:
                    scan_msg = self.__curr_msg
                    self.__data_available = False

                # https://answers.ros.org/question/202787/using-pointcloud2-data-getting-xy-points-in-python/
                point_cloud = self.__laser_projector.projectLaser(scan_msg)

                # Publish point cloud data
                if self.__publish_point_cloud:
                    self.__pc_pub.publish(point_cloud)

                # Shift all points counter clockwise 90 degrees - switch x,y and multiply x by -1
                all_points = []
                for p in pc2.read_points(point_cloud,
                                         field_names=("x", "y", "z"),
                                         skip_nans=True):
                    x = -1 * p[1]
                    y = p[0]
                    # Track only points in front of robot -- NW and NE quadrants
                    if y >= 0:
                        all_points.append(Point2D(x, y))

                if len(all_points) == 0:
                    continue

                # Determine outer range of points
                max_dist = round(
                    max([p.origin_dist
                         for p in all_points]) * self.__max_dist_mult, 2)
                rospy.loginfo("Points: {} Max dist: {}".format(
                    len(all_points), round(max_dist, 2)))

                # Reset all slices
                [s.reset(max_dist) for s in self.__slices]

                # Assign each slice
                for p in all_points:
                    # Do int division to determine which slice the point belongs to
                    slice_index = int(p.angle / self.__slice_size)
                    self.__slices[slice_index].add_point(p)

                # Adjust for slice_offset, which is a subset of slices
                nearest_points = [s.nearest for s in self.__slices]
                if self.__slice_offset > 0:
                    nearest_points = nearest_points[self.__slice_offset:-1 *
                                                    self.__slice_offset]

                if len(nearest_points) == 0:
                    continue

                # Perform these outside of lock to prevent blocking on scan readings
                # Calculate contour and centroid
                nearest_with_origin = [Origin] + nearest_points + [Origin]
                icx = [p.x for p in nearest_with_origin]
                icy = [p.y for p in nearest_with_origin]
                polygon = Polygon(zip(icx, icy))
                poly_centroid = polygon.centroid

                # Convert to msgs
                c = Contour()
                c.max_dist = max_dist
                c.slice_size = self.__slice_size
                c.all_points = np.asarray(
                    [p.to_ros_point() for p in all_points])
                c.nearest_points = np.asarray(
                    [p.to_ros_point() for p in nearest_points])
                c.centroid = Point2D(poly_centroid.x,
                                     poly_centroid.y).to_ros_point()

                # Publish centroid and contour
                self.__centroid_pub.publish(c.centroid)
                self.__contour_pub.publish(c)

                self.__rate.sleep()

        except KeyboardInterrupt:
            # This will prevent callstack dump on exit with Ctrl+C
            pass
Example #4
    def test_centroid(self):
        polygon = Polygon([(-1, -1), (1, -1), (1, 1), (-1, 1)])
        point = Point(0, 0)
        polygons = GeoSeries([polygon for i in range(3)])
        points = GeoSeries([point for i in range(3)])
        assert_geoseries_equal(polygons.centroid, points)
Example #5
    def testMerge(self):
        fake_image = FakeImage(30, 11, 3)
        fake_builder = FakeTileBuilder()
        topology = fake_image.tile_topology(fake_builder, 12, 9, 2)

        tile1 = topology.tile(1)
        tile2 = topology.tile(2)
        tile3 = topology.tile(3)
        tile4 = topology.tile(4)
        tile5 = topology.tile(5)
        tile6 = topology.tile(6)

        #    0    5    10   15   20        30  (col)
        #  0 +---------+---------+---------+
        #    |         | E--F    |         |
        #    |         | |  |    |         |
        #    |         | G--H    |         |
        #  4 |         |         |         |
        #    |    A----z----B    |  I---J  |
        #    |    |    |    |    |  |   |  |
        #  7 +----u----t----s----+--p---q--+
        #    |    |    |    |    |  |   |  |
        #  9 |    C----w----D    |  K---L  |
        #    |         |         |         |
        # 11 +---------+---------+---------+
        # (row)

        A = (5, 5)
        B = (5, 15)
        C = (9, 5)
        D = (9, 15)

        E = (1, 12)
        F = (1, 15)
        G = (3, 12)
        H = (3, 15)

        I = (5, 23)
        J = (5, 27)
        K = (9, 23)
        L = (9, 27)

        p = (7, 23)
        q = (7, 27)
        s = (7, 15)
        t = (7, 10)
        u = (7, 5)
        w = (9, 10)
        z = (5, 10)

        EFHG = Polygon([E, F, H, G, E])
        Aztu = Polygon([A, z, t, u, A])
        zBst = Polygon([z, B, s, t, z])
        tsDw = Polygon([t, s, D, w, t])
        utwC = Polygon([u, t, w, C, u])
        IJqp = Polygon([I, J, q, p, I])
        pqLK = Polygon([p, q, L, K, p])
        ABCD = Polygon([A, B, D, C, A])
        IJLK = Polygon([I, J, L, K, I])

        tiles = [tile1.identifier, tile2.identifier, tile3.identifier, tile4.identifier, tile5.identifier, tile6.identifier]
        tile_polygons = [[Aztu], [EFHG, zBst], [IJqp], [utwC], [tsDw], [pqLK]]

        polygons = SemanticMerger(1).merge(tiles, tile_polygons, topology)
        self.assertEqual(len(polygons), 3, "Number of found polygon")
        self.assertTrue(polygons[0].equals(ABCD), "ABCD polygon")
        self.assertTrue(polygons[1].equals(EFHG), "EFHG polygon")
        self.assertTrue(polygons[2].equals(IJLK), "IJLK polygon")
Example #6
def generate_rbox(im_size, polys, tags):
    h, w = im_size
    poly_mask = np.zeros((h, w), dtype=np.uint8)
    score_map = np.zeros((h, w), dtype=np.uint8)
    geo_map = np.zeros((h, w, 5), dtype=np.float32)
    # mask used during training, to ignore some hard areas
    training_mask = np.ones((h, w), dtype=np.uint8)
    for poly_idx, poly_tag in enumerate(zip(polys, tags)):
        poly = poly_tag[0]
        tag = poly_tag[1]

        r = [None, None, None, None]
        for i in range(4):
            r[i] = min(np.linalg.norm(poly[i] - poly[(i + 1) % 4]),
                       np.linalg.norm(poly[i] - poly[(i - 1) % 4]))
        # score map
        shrinked_poly = shrink_poly(poly.copy(),
                                    r).astype(np.int32)[np.newaxis, :, :]
        cv2.fillPoly(score_map, shrinked_poly, 1)
        cv2.fillPoly(poly_mask, shrinked_poly, poly_idx + 1)
        # if the poly is too small, then ignore it during training
        poly_h = min(np.linalg.norm(poly[0] - poly[3]),
                     np.linalg.norm(poly[1] - poly[2]))
        poly_w = min(np.linalg.norm(poly[0] - poly[1]),
                     np.linalg.norm(poly[2] - poly[3]))
        if min(poly_h, poly_w) < FLAGS.min_text_size:
            cv2.fillPoly(training_mask,
                         poly.astype(np.int32)[np.newaxis, :, :], 0)
        if tag:
            cv2.fillPoly(training_mask,
                         poly.astype(np.int32)[np.newaxis, :, :], 0)

        xy_in_poly = np.argwhere(poly_mask == (poly_idx + 1))
        # if geometry == 'RBOX':
        # generate a parallelogram for any combination of two vertices
        fitted_parallelograms = []
        for i in range(4):
            p0 = poly[i]
            p1 = poly[(i + 1) % 4]
            p2 = poly[(i + 2) % 4]
            p3 = poly[(i + 3) % 4]
            edge = fit_line([p0[0], p1[0]], [p0[1], p1[1]])
            backward_edge = fit_line([p0[0], p3[0]], [p0[1], p3[1]])
            forward_edge = fit_line([p1[0], p2[0]], [p1[1], p2[1]])
            if point_dist_to_line(p0, p1, p2) > point_dist_to_line(p0, p1, p3):
                # the parallel line passes through p2
                if edge[1] == 0:
                    edge_opposite = [1, 0, -p2[0]]
                else:
                    edge_opposite = [edge[0], -1, p2[1] - edge[0] * p2[0]]
            else:
                # it passes through p3
                if edge[1] == 0:
                    edge_opposite = [1, 0, -p3[0]]
                else:
                    edge_opposite = [edge[0], -1, p3[1] - edge[0] * p3[0]]
            # move forward edge
            new_p0 = p0
            new_p1 = p1
            new_p2 = p2
            new_p3 = p3
            new_p2 = line_cross_point(forward_edge, edge_opposite)
            if point_dist_to_line(p1, new_p2, p0) > point_dist_to_line(
                    p1, new_p2, p3):
                # across p0
                if forward_edge[1] == 0:
                    forward_opposite = [1, 0, -p0[0]]
                else:
                    forward_opposite = [
                        forward_edge[0], -1, p0[1] - forward_edge[0] * p0[0]
                    ]
            else:
                # across p3
                if forward_edge[1] == 0:
                    forward_opposite = [1, 0, -p3[0]]
                else:
                    forward_opposite = [
                        forward_edge[0], -1, p3[1] - forward_edge[0] * p3[0]
                    ]
            new_p0 = line_cross_point(forward_opposite, edge)
            new_p3 = line_cross_point(forward_opposite, edge_opposite)
            fitted_parallelograms.append(
                [new_p0, new_p1, new_p2, new_p3, new_p0])
            # or move backward edge
            new_p0 = p0
            new_p1 = p1
            new_p2 = p2
            new_p3 = p3
            new_p3 = line_cross_point(backward_edge, edge_opposite)
            if point_dist_to_line(p0, p3, p1) > point_dist_to_line(p0, p3, p2):
                # across p1
                if backward_edge[1] == 0:
                    backward_opposite = [1, 0, -p1[0]]
                else:
                    backward_opposite = [
                        backward_edge[0], -1, p1[1] - backward_edge[0] * p1[0]
                    ]
            else:
                # across p2
                if backward_edge[1] == 0:
                    backward_opposite = [1, 0, -p2[0]]
                else:
                    backward_opposite = [
                        backward_edge[0], -1, p2[1] - backward_edge[0] * p2[0]
                    ]
            new_p1 = line_cross_point(backward_opposite, edge)
            new_p2 = line_cross_point(backward_opposite, edge_opposite)
            fitted_parallelograms.append(
                [new_p0, new_p1, new_p2, new_p3, new_p0])
        areas = [Polygon(t).area for t in fitted_parallelograms]
        parallelogram = np.array(fitted_parallelograms[np.argmin(areas)][:-1],
                                 dtype=np.float32)
        # sort this polygon
        parallelogram_coord_sum = np.sum(parallelogram, axis=1)
        min_coord_idx = np.argmin(parallelogram_coord_sum)
        parallelogram = parallelogram[[
            min_coord_idx, (min_coord_idx + 1) % 4, (min_coord_idx + 2) % 4,
            (min_coord_idx + 3) % 4
        ]]

        rectange = rectangle_from_parallelogram(parallelogram)
        rectange, rotate_angle = sort_rectangle(rectange)

        p0_rect, p1_rect, p2_rect, p3_rect = rectange
        for y, x in xy_in_poly:
            point = np.array([x, y], dtype=np.float32)
            # top
            geo_map[y, x, 0] = point_dist_to_line(p0_rect, p1_rect, point)
            # right
            geo_map[y, x, 1] = point_dist_to_line(p1_rect, p2_rect, point)
            # down
            geo_map[y, x, 2] = point_dist_to_line(p2_rect, p3_rect, point)
            # left
            geo_map[y, x, 3] = point_dist_to_line(p3_rect, p0_rect, point)
            # angle
            geo_map[y, x, 4] = rotate_angle
    return score_map, geo_map, training_mask
Example #7
def _create_polygons(df):
    from shapely.geometry import Polygon

    return [Polygon(_get_vertices(df, area)) for area in df.index]
Example #8
def get_polygons(confidence, size, input):
    """
    Makes a list of Polygon objects which we can use to download the appropriate tiles

    :param confidence: Confidence that the object in the dataset has been accurately identified. 3 is low confidence, 2 is medium, and 1 is high confidence
    :param size: side length of the square tile built around each object centre
    :param input: path to the GeoJSON file with object locations and confidence
    :return: List of Polygon objects denoting the coordinates of the objects
    """

    logging.info("Beginning reading of AoI polygons")

    coordlist = []

    # Open file with object locations and confidence
    with open(str(input), "r") as f:
        contents = json.loads(f.read())
    logging.debug("Input GeoJSON file: %s" % contents)
    # Iterate through each object, making a polygon for each one and adding it to coordlist

    count = 0
    for feat in tqdm(contents['features'],
                     desc='Identifying hit polygons',
                     unit='polygon'):

        if 'Confidence' in feat['properties'].keys():
            classification = feat['properties']['Confidence']
        else:
            logging.debug("No confidence variable found")
            classification = 1
        if classification <= confidence:
            coord = feat['geometry']['coordinates']
            logging.debug(feat['geometry']['type'])
            if feat['geometry']['type'] == "MultiPolygon" or feat['geometry'][
                    'type'] == 'Polygon':

                polygon_coords = Polygon(format_coords(coord))
                centroid = polygon_coords.centroid
                centre = [centroid.x, centroid.y]

            elif feat['geometry']['type'] == 'Point':
                centre = list(coord)
            else:
                raise TypeError("Unsupported geometry type")
            # TODO: Add functionality for lines

            logging.debug("Centre: %s" % centre)

            point = Point(centre[1], centre[0])

            # If Areas of Interest are too close together, then we skip to avoid duplicate dataset elements
            if check_duplicates(point, coordlist):
                continue

            try:
                polygon_coords = square_polygon(centre[1], centre[0], size)
            except Exception:
                raise SyntaxError("Geojson must use lat/long coordinates")

            coordlist.append((count, polygon_coords, classification))
            count += 1

    return coordlist
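
A hypothetical invocation (square_polygon, check_duplicates and format_coords
are helpers from the same project; the GeoJSON path is illustrative):

aois = get_polygons(confidence=2, size=256, input='detections.geojson')
print('%d areas of interest' % len(aois))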
Example #9
    def plot(self, ax, m, lutfn=None, default_polygon_color='grey', **kwargs):
        '''
        Plots a shapefile. This function assumes that a shapefile containing polygonal
        data will have an attribute column named 'SYMBOL', which is used to pick corresponding
        color values from the colour lookup table, described in function processLUT.

        :param ax: plot axis
        :param m: basemap instance
        :param lutfn: colour look-up-table file name; if not provided,
                      polygons are coloured with the default colour
                      (default_polygon_color). This parameter is ignored
                      for shapefiles that contain only line data
        :param default_polygon_color: default color for polygons; overridden by colors
                                      provided in look-up-table, if given
        :param kwargs: list of relevant matplotlib arguments, e.g. alpha, zorder, color, etc.
        :return:
            legend_handles: legend handles for polygonal data; empty list for line data
            legend_labels: symbol names for polygonal data; empty list for line data
        '''

        # Populate lookup table
        self.processLUT(lutfn)

        patches = []
        legend_handles = []
        legend_labels = []
        handles = set()

        ecolor_is_fcolor = False
        if ('edgecolor' in kwargs.keys() and kwargs['edgecolor'] == 'face'):
            ecolor_is_fcolor = True
        # Process geometry
        for i, feature in enumerate(self._geometries):
            fcolor = None
            symbol = ''
            if (self._hasLUT):
                symbol = self._properties[i][self._symbolkey]
                fcolor = self._lutDict[symbol]
            if (fcolor == []): fcolor = default_polygon_color

            if (isinstance(feature, Polygon)):
                polygon = feature
                x, y = polygon.exterior.coords.xy
                if m is None:
                    px, py = x, y
                else:
                    px, py = m(x, y)
                ppolygon = Polygon(zip(px, py))

                if (fcolor is not None): kwargs['facecolor'] = fcolor
                if ('edgecolor' not in kwargs.keys() and not ecolor_is_fcolor):
                    kwargs['edgecolor'] = 'none'
                else:
                    kwargs['edgecolor'] = fcolor
                if ('fill') not in kwargs.keys(): kwargs['fill'] = True

                pp = PolygonPatch(ppolygon, **kwargs)
                patches.append(pp)

                # filter duplicates
                if (symbol not in handles):
                    handles.add(symbol)
                    legend_handles.append(pp)
                    legend_labels.append(symbol)

            elif (isinstance(feature, MultiPolygon)):
                multiPolygon = feature

                for polygon in multiPolygon:
                    x, y = polygon.exterior.coords.xy
                    if m is None:
                        px, py = x, y
                    else:
                        px, py = m(x, y)
                    ppolygon = Polygon(zip(px, py))

                    if (fcolor is not None): kwargs['facecolor'] = fcolor
                    if ('edgecolor' not in kwargs.keys()
                            and not ecolor_is_fcolor):
                        kwargs['edgecolor'] = 'none'
                    else:
                        kwargs['edgecolor'] = fcolor
                    if ('fill') not in kwargs.keys(): kwargs['fill'] = True

                    pp = PolygonPatch(ppolygon, **kwargs)
                    patches.append(pp)

                    # filter duplicates
                    if (symbol not in handles):
                        handles.add(symbol)
                        legend_handles.append(pp)
                        legend_labels.append(symbol)
                # end for
            elif (isinstance(feature, LineString)):
                line = feature
                x, y = line.coords.xy
                if m is None:
                    px, py = x, y
                else:
                    px, py = m(x, y)
                ax.plot(px, py, **kwargs)
            # end if
        # end for
        if (len(patches)):
            ax.add_collection(PatchCollection(patches, match_original=True))

        return legend_handles, legend_labels
Example #10
def get_way_attrs(id, tags, buffer):
    way = g_way(id)
    if is_road(way):
        way['type'] = 'way'
        if not tags:
            all_w_buildings = query_way_buildings(id)
            w_buildings = filter(
                lambda x: not buffer or Point(x['center_lon'], x['center_lat'])
                .within(buffer), all_w_buildings)

            shape = LineString(g_way_geom(id))

            all_building_polygons = {
                b['id']: Polygon(g_way_geom(b['id']))
                for b in w_buildings
            }
            w_buildings = filter(
                lambda b: not filter_building(shape, b, all_building_polygons),
                w_buildings)

            way['buildings'] = map(lambda x: 'way/{}'.format(x['id']),
                                   w_buildings)

            all_intersects = query_intersect_ways(id)
            w_intersects = filter(
                lambda (w, cuts): not buffer or nodes_in_buffer(cuts, buffer),
                all_intersects.items())
            way['intersect'] = map(lambda x: 'way/{}'.format(x[0]),
                                   w_intersects)

            for node in query_way_elms(id):
                p = Point(node['lon'], node['lat'])
                if not buffer or p.within(buffer):
                    if 'contains' not in way:
                        way['contains'] = []
                    n_key = _elm_key(node, MATCH_TAGS)
                    if n_key is None:
                        n_key = 'node'
                    way['contains'].append({
                        'id': 'node/{}'.format(node['id']),
                        'type': n_key
                    })

    elif is_building(way):
        way['type'] = 'building'
        if not tags:
            for node in query_building_elms(way):
                p = Point(node['lon'], node['lat'])
                if not buffer or p.within(buffer):
                    if 'contains' not in way:
                        way['contains'] = []
                    n_key = _elm_key(node, MATCH_TAGS)
                    if n_key is None:
                        n_key = 'node'
                    way['contains'].append({
                        'id': 'node/{}'.format(node['id']),
                        'type': n_key
                    })

            way['ways'] = map(lambda x: 'way/{}'.format(x['id']),
                              query_building_ways(id))
            surr = []
            way['surrounding_buildings'] = surr
            for w in query_surrounding_buildings(id):
                if w['id'] == int(id):
                    way['center'] = {
                        'lat': w['center_lat'],
                        'lon': w['center_lon']
                    }
                else:
                    p = Point(w['center_lon'], w['center_lat'])
                    if not buffer or p.within(buffer):
                        surr.append('way/{}'.format(w['id']))

            way['areas'] = map(
                lambda aid: 'area/{}'.format(aid),
                g_coord_area(way['center']['lat'], way['center']['lon']))

    if 'nd' in way:
        del way['nd']
    return way
Example #11
def bbox_iou(box1, box2, x1y1x2y2=True):
    """
    Returns the IoU of two bounding boxes
    
    if not x1y1x2y2:
        # Transform from center and width to exact coordinates
        b1_x1, b1_x2 = box1[:, 0] - box1[:, 2] / 2, box1[:, 0] + box1[:, 2] / 2
        b1_y1, b1_y2 = box1[:, 1] - box1[:, 3] / 2, box1[:, 1] + box1[:, 3] / 2
        b2_x1, b2_x2 = box2[:, 0] - box2[:, 2] / 2, box2[:, 0] + box2[:, 2] / 2
        b2_y1, b2_y2 = box2[:, 1] - box2[:, 3] / 2, box2[:, 1] + box2[:, 3] / 2
    else:
        # Get the coordinates of bounding boxes
        b1_x1, b1_y1, b1_x2, b1_y2 = box1[:, 0], box1[:, 1], box1[:, 2], box1[:, 3]
        b2_x1, b2_y1, b2_x2, b2_y2 = box2[:, 0], box2[:, 1], box2[:, 2], box2[:, 3]
    

    # get the corrdinates of the intersection rectangle
    inter_rect_x1 = torch.max(b1_x1, b2_x1)
    inter_rect_y1 = torch.max(b1_y1, b2_y1)
    inter_rect_x2 = torch.min(b1_x2, b2_x2)
    inter_rect_y2 = torch.min(b1_y2, b2_y2)
    # Intersection area
    inter_area = torch.clamp(inter_rect_x2 - inter_rect_x1 + 1, min=0) * torch.clamp(
        inter_rect_y2 - inter_rect_y1 + 1, min=0
    )
    # Union Area
    b1_area = (b1_x2 - b1_x1 + 1) * (b1_y2 - b1_y1 + 1)
    b2_area = (b2_x2 - b2_x1 + 1) * (b2_y2 - b2_y1 + 1)

    iou = inter_area / (b1_area + b2_area - inter_area + 1e-16)

    """

    # compute IoU scores given the 4 corner points of each box
    # b1xc = box1[:, 0]
    # b1yc = box1[:, 1]
    # b1x1 = b1xc + box1[:, 2]
    # b1y1 = b1yc + box1[:, 3]
    # b1x2 = b1xc + box1[:, 4]
    # b1y2 = b1yc + box1[:, 5]
    # b1x3 = b1xc + box1[:, 6]
    # b1y3 = b1yc + box1[:, 7]
    # b1x4 = b1xc + box1[:, 8]
    # b1y4 = b1yc + box1[:, 9]

    # b2xc = box2[:, 0]
    # b2yc = box2[:, 1]
    # b2x1 = b2xc + box2[:, 2]
    # b2y1 = b2yc + box2[:, 3]
    # b2x2 = b2xc + box2[:, 4]
    # b2y2 = b2yc + box2[:, 5]
    # b2x3 = b2xc + box2[:, 6]
    # b2y3 = b2yc + box2[:, 7]
    # b2x4 = b2xc + box2[:, 8]
    # b2y4 = b2yc + box2[:, 9]

    ious = []
    for i in range(box1.size(0)):
        b1xc = box1[i, 0]
        b1yc = box1[i, 1]
        b1x1 = b1xc + box1[i, 2]
        b1y1 = b1yc + box1[i, 3]
        b1x2 = b1xc + box1[i, 4]
        b1y2 = b1yc + box1[i, 5]
        b1x3 = b1xc + box1[i, 6]
        b1y3 = b1yc + box1[i, 7]
        b1x4 = b1xc + box1[i, 8]
        b1y4 = b1yc + box1[i, 9]

        b2xc = box2[i, 0]
        b2yc = box2[i, 1]
        b2x1 = b2xc + box2[i, 2]
        b2y1 = b2yc + box2[i, 3]
        b2x2 = b2xc + box2[i, 4]
        b2y2 = b2yc + box2[i, 5]
        b2x3 = b2xc + box2[i, 6]
        b2y3 = b2yc + box2[i, 7]
        b2x4 = b2xc + box2[i, 8]
        b2y4 = b2yc + box2[i, 9]

        line1 = np.array([float(b1x1), float(b1y1), float(b1x2), float(b1y2),
                          float(b1x3), float(b1y3), float(b1x4), float(b1y4)]).reshape(4, 2)
        poly1 = Polygon(line1).convex_hull

        line2 = np.array([float(b2x1), float(b2y1), float(b2x2), float(b2y2),
                          float(b2x3), float(b2y3), float(b2x4), float(b2y4)]).reshape(4, 2)
        poly2 = Polygon(line2).convex_hull

        union_poly = np.concatenate((line1,line2))

        inter_area = poly1.intersection(poly2).area  # intersection area

        #union_area = poly1.area + poly2.area - inter_area
        union_area = MultiPoint(union_poly).convex_hull.area
        
        if union_area == 0:
            iou = 0.0
        else:
            iou = float(inter_area) / union_area
        ious.append(iou)

    return torch.Tensor(ious)
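
A small smoke test for the loop above (random tensors in the expected
[xc, yc, x1-xc, y1-yc, ..., x4-xc, y4-yc] layout; purely illustrative):

import torch

box1 = torch.rand(4, 10)
box2 = torch.rand(4, 10)
print(bbox_iou(box1, box2))  # tensor with 4 IoU values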
Example #12
def get_area_multipolygon(id):
    area_geoms = g_area_geom(str(id))
    area_polys = [Polygon(points) for points in area_geoms]
    return MultiPolygon(area_polys)
Example #13
def g_area_geom(id):
    debug('Area geometry of ' + str(id))

    def transform_tag_value(k, v):
        if k == 'admin_level':
            return int(v)
        return u'"{}"'.format(v)

    result = api.query("""area({});out;""".format(id))

    area_tags = result.areas[0].tags
    area_tags = {
        key: transform_tag_value(key, v)
        for key, v in area_tags.items()
        if key in ['type', 'name:en', 'name', 'boundary', 'wikidata', 'is_in']
    }

    if 'name' not in area_tags:
        return []

    geom = api.query(u"""    
            area({});        
            rel(pivot);            
            out geom;
    """.format(id),
                     cache=False)

    if geom is None:
        return []

    tag_filters = u''.join(
        [u'["{}"={}]'.format(key, value) for key, value in area_tags.items()])

    if not geom.relations:
        geom = api.query(u"""    
                    rel{};            
                    out geom;
            """.format(tag_filters),
                         cache=False)

    if geom.relations:
        area_poly = relation_multipolygon(geom.relations)
    else:
        geom = api.query(u"""
                    area({});
                    way(pivot);            
                    out geom;
                    >;
                    out skel qt;
            """.format(id),
                         cache=False)
        all_points = list(geom.ways).pop().nodes
        area_poly = Polygon(
            map(lambda n: (float(n.lon), float(n.lat)), all_points))

    if isinstance(area_poly, Polygon):
        area_poly = MultiPolygon([area_poly])

    return [list(p.exterior.coords) for p in area_poly]
Example #14
            print "trure"
            return True
    print "false"
    return False


def checkifPointisinbound(polygonpoints, point):
    p2 = Point(point)
    for poly in polygonpoints:
        p1 = Polygon(poly)
        if p1.contains(p2):
            print "true"
            return True
    print "false"
    return False


#
#p1=Polygon(polygonpoints)
#p2=LineString([(-1.0, -1.0), (0.0, 6.0)])

#polysOneLine(polygonpoints, [(-1.0, -1.0), (0.0, 6.0)])

checkifPointisinbound(polygonpoints, (7, 7))

p1 = Polygon(polygonpoints[0])
#print p1.contains(Point(1,2))
#[[(-1.0, -1.0), (0.0, 6.0), (1.0, 6.0), (2.0, 2.0), (4.0, 2.0), (4.0, 3.0), (4.0, 4.0)]]

#print p2.intersects(p1)
Example #15
def repair_invalid(polygon, scale=None, rtol=.5):
    """
    Given a shapely.geometry.Polygon, attempt to return a
    valid version of the polygon through buffering tricks.

    Parameters
    -----------
    polygon: shapely.geometry.Polygon object
    rtol:    float, relative tolerance when comparing the repaired
             perimeter against the original perimeter
    scale:   float or None, scale used to pick the buffer distance;
             estimated from the polygon if not provided

    Returns
    ----------
    repaired: shapely.geometry.Polygon object

    Raises
    ----------
    ValueError: if polygon can't be repaired
    """
    if hasattr(polygon, 'is_valid') and polygon.is_valid:
        return polygon

    # basic repair involves buffering the polygon outwards
    # this will fix a subset of problems.
    basic = polygon.buffer(tol.zero)
    # if it returned multiple polygons check the largest
    if util.is_sequence(basic):
        basic = basic[np.argmax([i.area for i in basic])]

    # check perimeter of result against original perimeter
    if basic.is_valid and np.isclose(basic.length,
                                     polygon.length,
                                     rtol=rtol):
        return basic

    if scale is None:
        distance = tol.buffer * polygon_scale(polygon)
    else:
        distance = tol.buffer * scale

    # if there are no interiors, we can work with just the exterior
    # ring, which is often more reliable
    if len(polygon.interiors) == 0:
        # try buffering the exterior of the polygon
        # the interior will be offset by -tol.buffer
        rings = polygon.exterior.buffer(distance).interiors
        if len(rings) == 1:
            # reconstruct a single polygon from the interior ring
            recon = Polygon(shell=rings[0]).buffer(distance)
            # check perimeter of result against original perimeter
            if recon.is_valid and np.isclose(recon.length,
                                             polygon.length,
                                             rtol=rtol):
                return recon

    # buffer and unbuffer the whole polygon
    buffered = polygon.buffer(distance).buffer(-distance)
    # if it returned multiple polygons check the largest
    if util.is_sequence(buffered):
        buffered = buffered[np.argmax([i.area for i in buffered])]
    # check perimeter of result against original perimeter
    if buffered.is_valid and np.isclose(buffered.length,
                                        polygon.length,
                                        rtol=rtol):
        log.debug('Recovered invalid polygon through double buffering')
        return buffered

    raise ValueError('unable to recover polygon!')
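
A sketch under stated assumptions (tol, util, polygon_scale and log come from
the surrounding module, so this only runs inside it; the bowtie quad is a
classic self-intersecting case):

from shapely.geometry import Polygon

bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])
repaired = repair_invalid(bowtie)
print(repaired.is_valid)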
Example #16
    def plotlocal(self,
                  epsg_from,
                  epsg_to,
                  centre_shift=[0., 0.],
                  ax=None,
                  map_scale='m',
                  default_polygon_color='grey',
                  **kwargs):
        '''
        Plots a shapefile as lines in local coordinates (for overlaying on
        existing depth slice plotting functions, for example)
        :epsg_from: epsg the shapefile is in
        :epsg_to: epsg you would like the output to be plotted in
        :centre_shift: option to shift by [x, y] to convert (for example)
                       to local coordinates
        :ax: axes instance to plot on
        :map_scale: 'km' or 'm' - scale of map we are plotting onto
        :kwargs: key word arguments to the matplotlib plot function
        :return:
        '''
        # set default line colour to black
        if 'color' not in kwargs.keys():
            kwargs['color'] = 'k'

        if ax is None:
            ax = plt.subplot(111)

        if map_scale == 'km':
            scale_factor = 1000.
        else:
            scale_factor = 1.

        patches = []
        legend_handles = []
        legend_labels = []
        handles = set()

        ecolor_is_fcolor = False
        if ('edgecolor' in kwargs.keys() and kwargs['edgecolor'] == 'face'):
            ecolor_is_fcolor = True
        # Process geometry
        for i, feature in enumerate(self._geometries):
            fcolor = None
            symbol = ''
            if (self._hasLUT):
                symbol = self._properties[i][self._symbolkey]
                fcolor = self._lutDict[symbol]
            if (fcolor == []): fcolor = default_polygon_color

            if (isinstance(feature, Polygon)):
                polygon = feature
                x, y = polygon.exterior.coords.xy
                px, py = self._xy_to_local(x, y, epsg_from, epsg_to,
                                           centre_shift, scale_factor)
                ppolygon = Polygon(zip(px, py))

                if (fcolor is not None): kwargs['facecolor'] = fcolor
                if ('edgecolor' not in kwargs.keys() and not ecolor_is_fcolor):
                    kwargs['edgecolor'] = 'none'
                else:
                    kwargs['edgecolor'] = fcolor
                if ('fill') not in kwargs.keys(): kwargs['fill'] = True

                pp = PolygonPatch(ppolygon, **kwargs)
                patches.append(pp)

                # filter duplicates
                if (symbol not in handles):
                    handles.add(symbol)
                    legend_handles.append(pp)
                    legend_labels.append(symbol)

            elif (isinstance(feature, MultiPolygon)):
                multiPolygon = feature

                for polygon in multiPolygon:
                    x, y = polygon.exterior.coords.xy
                    px, py = self._xy_to_local(x, y, epsg_from, epsg_to,
                                               centre_shift, scale_factor)
                    ppolygon = Polygon(zip(px, py))

                    if (fcolor is not None): kwargs['facecolor'] = fcolor
                    if ('edgecolor' not in kwargs.keys()
                            and not ecolor_is_fcolor):
                        kwargs['edgecolor'] = 'none'
                    else:
                        kwargs['edgecolor'] = fcolor
                    if ('fill') not in kwargs.keys(): kwargs['fill'] = True

                    pp = PolygonPatch(ppolygon, **kwargs)
                    patches.append(pp)

                    # filter duplicates
                    if (symbol not in handles):
                        handles.add(symbol)
                        legend_handles.append(pp)
                        legend_labels.append(symbol)
                        # end for
            elif (isinstance(feature, LineString)):
                line = feature
                x, y = line.coords.xy
                px, py = self._xy_to_local(x, y, epsg_from, epsg_to,
                                           centre_shift, scale_factor)
                ax.plot(px, py, **kwargs)
                # end if
        # end for
        if (len(patches)):
            ax.add_collection(PatchCollection(patches, match_original=True))

        return ax, legend_handles, legend_labels
Example #17
def plot_grid_world(episode_df, inner, outer, scale=10.0, plot=True):
    """
    plot a scaled version of the lap, along with the throttle taken at each position
    """
    stats = []
    outer = [(val[0] / scale, val[1] / scale) for val in outer]
    inner = [(val[0] / scale, val[1] / scale) for val in inner]

    max_x = int(np.max([val[0] for val in outer]))
    max_y = int(np.max([val[1] for val in outer]))

    print(max_x, max_y)
    grid = np.zeros((max_x + 1, max_y + 1))

    # create shapely rings for outer and inner
    outer_polygon = Polygon(outer)
    inner_polygon = Polygon(inner)

    print('Outer polygon length = %.2f (meters)' %
          (outer_polygon.length / scale))
    print('Inner polygon length = %.2f (meters)' %
          (inner_polygon.length / scale))

    dist = 0.0
    for ii in range(1, len(episode_df)):
        dist += math.sqrt(
            (episode_df['x'].iloc[ii] - episode_df['x'].iloc[ii - 1])**2 +
            (episode_df['y'].iloc[ii] - episode_df['y'].iloc[ii - 1])**2)
    dist /= 100.0

    t0 = datetime.fromtimestamp(float(episode_df['timestamp'].iloc[0]))
    t1 = datetime.fromtimestamp(
        float(episode_df['timestamp'].iloc[len(episode_df) - 1]))

    lap_time = (t1 - t0).total_seconds()

    average_throttle = np.nanmean(episode_df['throttle'])
    max_throttle = np.nanmax(episode_df['throttle'])
    min_throttle = np.nanmin(episode_df['throttle'])
    velocity = dist / lap_time

    print('Distance, lap time = %.2f (meters), %.2f (sec)' % (dist, lap_time))
    print('Average throttle, velocity = %.2f (Gazebo), %.2f (meters/sec)' %
          (average_throttle, velocity))

    stats.append((dist, lap_time, velocity, average_throttle, min_throttle,
                  max_throttle))

    if plot == True:
        for y in range(max_y):
            for x in range(max_x):
                point = Point((x, y))

                # this is the track
                if (not inner_polygon.contains(point)) and (
                        outer_polygon.contains(point)):
                    grid[x][y] = -1.0

                # find df slice that fits into this
                df_slice = episode_df[(episode_df['x'] >= (x - 1) * scale) & (episode_df['x'] < x * scale) & \
                                   (episode_df['y'] >= (y - 1) * scale) & (episode_df['y'] < y * scale)]

                if len(df_slice) > 0:
                    #average_throttle = np.nanmean(df_slice['throttle'])
                    grid[x][y] = np.nanmean(df_slice['throttle'])

        fig = plt.figure(figsize=(7, 7))
        imgplot = plt.imshow(grid)
        plt.colorbar(orientation='vertical')
        plt.title('Lap time (sec) = %.2f' % lap_time)
        #plt.savefig('grid.png')

    return lap_time, average_throttle, stats
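
Illustrative call (episode_df must carry 'x', 'y', 'timestamp' and 'throttle'
columns; episode_df, inner_border and outer_border are hypothetical names):

lap_time, avg_throttle, stats = plot_grid_world(
    episode_df, inner_border, outer_border, scale=10.0, plot=True)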
Example #18
def VectorizeSwathPolygons(params, processes=1, **kwargs):
    """
    Vectorize spatial units' polygons
    """

    # parameters = ValleyBottomParameters()
    # parameters.update({key: kwargs[key] for key in kwargs.keys() & parameters.keys()})
    # kwargs = {key: kwargs[key] for key in kwargs.keys() - parameters.keys()}
    # params = SwathMeasurementParams(**parameters)

    defs = ReadSwathsBounds(params)

    def arguments():

        for (axis, gid), (measure, bounds) in defs.items():
            yield (
                VectorizeOneSwathPolygon,
                axis,
                gid,
                measure,
                bounds,
                params,
                kwargs
            )

    output = params.output_swaths_shapefile.filename(tileset=None)
    # config.filename(params.output_swaths_shapefile, mod=False)

    schema = {
        'geometry': 'Polygon',
        'properties': [
            ('GID', 'int'),
            ('AXIS', 'int:4'),
            ('VALUE', 'int:4'),
            # ('ROW', 'int:3'),
            # ('COL', 'int:3'),
            ('M', 'float:10.2')
        ]
    }
    crs = fiona.crs.from_epsg(2154)
    options = dict(driver='ESRI Shapefile', crs=crs, schema=schema)

    with fiona.open(output, 'w', **options) as dst:

        with Pool(processes=processes) as pool:

            pooled = pool.imap_unordered(starcall, arguments())

            with click.progressbar(pooled, length=len(defs)) as iterator:
                for axis, gid, measure, polygons in iterator:
                    for (polygon, value) in polygons:

                        geom = asShape(polygon)
                        exterior = Polygon(geom.exterior).buffer(0)

                        feature = {
                            'geometry': exterior.__geo_interface__,
                            'properties': {
                                'GID': int(gid),
                                'AXIS': int(axis),
                                'VALUE': int(value),
                                # 'ROW': row,
                                # 'COL': col,
                                'M': float(measure)
                            }
                        }

                        dst.write(feature)

                        for ring in geom.interiors:

                            if not exterior.contains(ring):

                                feature = {
                                    'geometry': Polygon(ring).buffer(0).__geo_interface__,
                                    'properties': {
                                        'GID': int(gid),
                                        'AXIS': int(axis),
                                        'VALUE': int(value),
                                        # 'ROW': row,
                                        # 'COL': col,
                                        'M': float(measure)
                                    }
                                }

                                dst.write(feature)
Example #19
    def test_AreaTablesAreaInterpolate(self):
        polys1 = gpd.GeoSeries([
            Polygon([(0, 0), (10, 0), (10, 5), (0, 5)]),
            Polygon([(0, 5), (0, 10), (10, 10), (10, 5)])
        ])

        polys2 = gpd.GeoSeries([
            Polygon([(0, 0), (5, 0), (5, 7), (0, 7)]),
            Polygon([(5, 0), (5, 10), (10, 10), (10, 0)]),
            Polygon([(0, 7), (0, 10), (5, 10), (5, 7)])
        ])

        df1 = gpd.GeoDataFrame({'geometry': polys1})
        df2 = gpd.GeoDataFrame({'geometry': polys2})
        df1['population'] = [500, 200]
        df1['pci'] = [75, 100]
        df1['income'] = df1['population'] * df1['pci']

        res_union = gpd.overlay(df1, df2, how='union')

        result_area = area_tables(df1, res_union)
        result_area_binning = area_tables_binning(df1, res_union)

        np.testing.assert_almost_equal(result_area[0],
                                       np.array([[25., 25., 0., 0., 0.],
                                                 [0., 0., 10., 15., 25.]]),
                                       decimal=3)

        np.testing.assert_almost_equal(result_area[1],
                                       np.array([[1., 0., 0., 0., 0.],
                                                 [0., 0., 1., 0., 0.],
                                                 [0., 1., 0., 0., 0.],
                                                 [0., 0., 0., 0., 1.],
                                                 [0., 0., 0., 1., 0.]]),
                                       decimal=3)

        np.testing.assert_almost_equal(result_area_binning.toarray(),
                                       np.array([[25., 0., 25., 0., 0.],
                                                 [0., 10., 0., 25., 15.]]),
                                       decimal=3)

        result_inte = area_interpolate(
            df1,
            res_union,
            extensive_variables=['population', 'income'],
            intensive_variables=['pci'])
        result_inte_binning = area_interpolate_binning(
            df1,
            res_union,
            extensive_variables=['population', 'income'],
            intensive_variables=['pci'])

        np.testing.assert_almost_equal(result_inte[0],
                                       np.array([[250., 18750.], [40., 4000.],
                                                 [250.,
                                                  18750.], [100., 10000.],
                                                 [60., 6000.]]),
                                       decimal=3)

        np.testing.assert_almost_equal(result_inte[1],
                                       np.array([[75.], [100.], [75.], [100.],
                                                 [100.]]),
                                       decimal=3)

        np.testing.assert_almost_equal(result_inte_binning[0],
                                       np.array([[250., 18750.],
                                                 [39.99999762, 3999.99976158],
                                                 [250., 18750.],
                                                 [100., 10000.],
                                                 [59.99999642,
                                                  5999.99964237]]),
                                       decimal=3)

        np.testing.assert_almost_equal(result_inte_binning[1],
                                       np.array([[75.], [100.], [75.], [100.],
                                                 [100.]]),
                                       decimal=3)
Example #20
    def aggregateByGrid(df, field, summary, gridSize):
        """
        Aggregates the specified field with the chosen summary type and a
        user-defined grid size. Returns the aggregated grids with the summary.

        Parameters
        ----------
        df : geopandas dataframe
        field : string
            field to be summarized.
        summary : string
            type of summary to be computed, e.g. min, max, sum, median
        gridSize : float
            the size of grid on same unit as geodataframe coordinates.

        Returns
        -------
        geodataframe
            Aggregated grids with summary on it

        """
        def round_down(num, divisor):
            return floor(num / divisor) * divisor

        def round_up(num, divisor):
            return ceil(num / divisor) * divisor

        # Get crs from data
        sourceCRS = df.crs
        targetCRS = {"init": "EPSG:3857"}
        # Reproject to Mercator
        df = df.to_crs(targetCRS)
        # Get bounds
        xmin, ymin, xmax, ymax = df.total_bounds
        print(xmin, ymin, xmax, ymax)
        height, width = gridSize, gridSize
        top, left = round_up(ymax, height), round_down(xmin, width)
        bottom, right = round_down(ymin, height), round_up(xmax, width)

        rows = int((top - bottom) / height) + 1
        cols = int((right - left) / width) + 1

        XleftOrigin = left
        XrightOrigin = left + width
        YtopOrigin = top
        YbottomOrigin = top - height
        polygons = []
        for i in range(cols):
            Ytop = YtopOrigin
            Ybottom = YbottomOrigin
            for j in range(rows):
                polygons.append(
                    Polygon([(XleftOrigin, Ytop), (XrightOrigin, Ytop),
                             (XrightOrigin, Ybottom), (XleftOrigin, Ybottom)]))
                Ytop = Ytop - height
                Ybottom = Ybottom - height
            XleftOrigin = XleftOrigin + width
            XrightOrigin = XrightOrigin + width

        grid = gpd.GeoDataFrame({'geometry': polygons})
        grid.crs = df.crs

        # Assign gridid
        numGrid = len(grid)
        grid['gridId'] = list(range(numGrid))

        # Identify gridId for each point
        points_identified = gpd.sjoin(df, grid, op='within')

        # group points by gridid and calculate mean Easting,
        # store it as dataframe
        # delete if field already exists
        if field in grid.columns:
            del grid[field]
        grouped = points_identified.groupby('gridId')[field].agg(summary)
        grouped_df = pd.DataFrame(grouped)

        new_grid = grid.join(grouped_df, on='gridId').fillna(0)
        grid = new_grid.to_crs(sourceCRS)
        summarized_field = summary + "_" + field
        final_grid = grid.rename(columns={field: summarized_field})
        final_grid = final_grid[final_grid[summarized_field] > 0].sort_values(
            by=summarized_field, ascending=False)
        final_grid[summarized_field] = round(final_grid[summarized_field], 1)
        final_grid['x_centroid'], final_grid['y_centroid'] = \
            final_grid.geometry.centroid.x, final_grid.geometry.centroid.y
        return final_grid
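
# A minimal usage sketch for aggregateByGrid (illustrative, not from the
# source): synthetic points with a hypothetical numeric 'fare' column are
# aggregated onto 1 km (EPSG:3857) grid cells. Assumes the function and its
# dependencies (floor, ceil, pd, gpd, Polygon) are in scope as plain names.
import geopandas as gpd
from shapely.geometry import Point

pts = gpd.GeoDataFrame(
    {'fare': [1.0, 2.0, 3.0],
     'geometry': [Point(0.001, 0.001), Point(0.002, 0.002), Point(0.5, 0.5)]},
    crs="EPSG:4326")
agg = aggregateByGrid(pts, field='fare', summary='sum', gridSize=1000)
print(agg[['gridId', 'sum_fare']])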
Example #21
def kps_to_polygon(kps):
    """
        Convert imgaug keypoints to shapely polygon
    """
    pts = [(kp.x, kp.y) for kp in kps]
    return Polygon(pts)
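
# A short usage sketch: three imgaug keypoints outlining a right triangle
# become a shapely polygon with area 6 (Keypoint lives in
# imgaug.augmentables.kps).
from imgaug.augmentables.kps import Keypoint

kps = [Keypoint(x=0, y=0), Keypoint(x=4, y=0), Keypoint(x=0, y=3)]
print(kps_to_polygon(kps).area)  # 6.0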
Example #22
    def spatioTemporalAggregation(df, field, summary, gridSize):
        """
        Aggregates the given field on an hour and weekday basis and
        prepares the data for a mosaic plot.

        Parameters
        ----------
        df : geopandas dataframe
        field : string
            field to be summarized.
        summary : string
            type of summary to compute, e.g. min, max, sum, median.
        gridSize : float
            the grid cell size, in the same units as the geodataframe
            coordinates.

        Returns
        -------
        geodataframes : two geodataframes, one for the larger grid and one
            for the subgrids (the latter for visualization purposes only),
            each carrying the aggregated summary

        """
        def round_down(num, divisor):
            return floor(num / divisor) * divisor

        def round_up(num, divisor):
            return ceil(num / divisor) * divisor

        # Get crs from data
        sourceCRS = df.crs
        targetCRS = "EPSG:3857"  # dict-style {'init': ...} CRS is deprecated
        # Reproject to Web Mercator
        df = df.to_crs(targetCRS)

        # Get bounds
        xmin, ymin, xmax, ymax = df.total_bounds
        height, width = gridSize, gridSize
        top, left = round_up(ymax, height), round_down(xmin, width)
        bottom, right = round_down(ymin, height), round_up(xmax, width)

        rows = int((top - bottom) / height) + 1
        cols = int((right - left) / width) + 1

        XleftOrigin = left
        XrightOrigin = left + width
        YtopOrigin = top
        YbottomOrigin = top - height
        polygons = []

        for i in range(cols):
            Ytop = YtopOrigin
            Ybottom = YbottomOrigin
            for j in range(rows):
                polygons.append(
                    Polygon([(XleftOrigin, Ytop), (XrightOrigin, Ytop),
                             (XrightOrigin, Ybottom), (XleftOrigin, Ybottom)]))
                Ytop = Ytop - height
                Ybottom = Ybottom - height
            XleftOrigin = XleftOrigin + width
            XrightOrigin = XrightOrigin + width

        grid = gpd.GeoDataFrame({'geometry': polygons})
        grid.crs = (targetCRS)

        # Assign gridid
        numGrid = len(grid)
        grid['gridId'] = list(range(numGrid))

        # Identify gridId for each point

        # Parse the timestamps once; pd.to_datetime returns a datetime64
        # series, so the .dt accessor works (apply(strptime) would produce an
        # object-dtype series on which .dt fails)
        parsed_time = pd.to_datetime(df['time'], format='%Y-%m-%dT%H:%M:%S')
        df['hour'] = parsed_time.dt.hour
        df['weekday'] = parsed_time.dt.dayofweek
        points_identified = gpd.sjoin(df, grid, predicate='within')

        # Group points by gridId and compute the requested summary of the
        # field, dropping the column first if it already exists on the grid
        if field in grid.columns:
            del grid[field]

        # Aggregate by weekday, hour and grid
        grouped = points_identified.groupby(['gridId', 'weekday',
                                             'hour']).agg({field: [summary]})
        grouped = grouped.reset_index()
        grouped.columns = grouped.columns.map("_".join)
        modified_fieldname = field + "_" + summary

        # Create Subgrids
        subgrid, mainGrid, rowNum, columnNum, value = [], [], [], [], []
        unikGrid = grouped['gridId_'].unique()
        for currentGrid in unikGrid:
            dataframe = grid[grid['gridId'] == currentGrid]
            xmin, ymin, xmax, ymax = dataframe.total_bounds
            xminn = xmin + (xmax - xmin) * 0.05
            xmaxx = xmax - (xmax - xmin) * 0.05
            yminn = ymin + (ymax - ymin) * 0.05
            ymaxx = ymax - (ymax - ymin) * 0.05
            rowOffset = (ymaxx - yminn) / 24.0
            colOffset = (xmaxx - xminn) / 7.0
            for i in range(7):
                for j in range(24):
                    topy = ymaxx - j * rowOffset
                    bottomy = ymaxx - (j + 1) * rowOffset
                    leftx = xminn + i * colOffset
                    rightx = xminn + (i + 1) * colOffset
                    subgrid.append(
                        Polygon([(leftx, topy), (rightx, topy),
                                 (rightx, bottomy), (leftx, bottomy)]))
                    mainGrid.append(currentGrid)
                    rowNum.append(j)
                    columnNum.append(i)
                    if len(grouped[(grouped['gridId_'] == currentGrid)
                                   & (grouped['weekday_'] == i)
                                   & (grouped['hour_'] == j)]) != 0:
                        this_value = grouped[
                            (grouped['gridId_'] == currentGrid)
                            & (grouped['weekday_'] == i)
                            & (grouped['hour_']
                               == j)].iloc[0][modified_fieldname]
                        value.append(this_value)
                    else:
                        value.append(np.nan)
        subgrid_gpd = gpd.GeoDataFrame({'geometry': subgrid})
        subgrid_gpd.crs = targetCRS
        # Reproject back to the source CRS
        subgrid_gpd = subgrid_gpd.to_crs(sourceCRS)
        subgrid_gpd['gridId'] = mainGrid
        subgrid_gpd['Weekday'] = columnNum
        subgrid_gpd['hour'] = rowNum
        subgrid_gpd['gridId'] = subgrid_gpd.apply(lambda x: str(x[
            'gridId']) + "_" + str(x['Weekday']) + "_" + str(x['hour']),
                                                  axis=1)
        subgrid_gpd[modified_fieldname] = value
        subgrid_gpd = subgrid_gpd.dropna()
        grid = grid.to_crs(sourceCRS)
        grid = grid[grid['gridId'].isin(unikGrid)]
        return grid, subgrid_gpd
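
# A usage sketch for spatioTemporalAggregation (illustrative only): each
# returned subgrid cell encodes one (weekday, hour) slot inside its parent
# grid cell, laid out as 7 columns x 24 rows for a mosaic plot. The
# 'speed'/'time' columns are hypothetical; the function and its dependencies
# are assumed to be in scope as plain names.
import geopandas as gpd
from shapely.geometry import Point

pts = gpd.GeoDataFrame(
    {'speed': [10.0, 20.0],
     'time': ['2020-01-06T08:00:00', '2020-01-07T09:30:00'],
     'geometry': [Point(0.001, 0.001), Point(0.002, 0.002)]},
    crs="EPSG:4326")
grid, subgrid = spatioTemporalAggregation(pts, 'speed', 'mean', 1000)
print(subgrid[['gridId', 'speed_mean']].head())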
Example #23
 def test_buffer(self):
     original = GeoSeries([Point(0, 0)])
     expected = GeoSeries(
         [Polygon(((5, 0), (0, -5), (-5, 0), (0, 5), (5, 0)))])
     calculated = original.buffer(5, resolution=1)
     assert geom_almost_equals(expected, calculated)
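
# Note (added): shapely's `resolution` is the number of segments used to
# approximate each quarter circle, so resolution=1 turns the buffer of a
# point into the diamond asserted above rather than a smooth circle.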
Example #24
    def getInstallationAreaConstraints(self):
        """
        getInstallationAreaConstraints: used to identify the nogo zones associated with the
        water depth installation constraints of the specific machine.

        Args:
            self (class)

        Note:
            the method updates the following self.S_data attributes:
                .NogoAreas_bathymetry: a list of polygons marking the
                    unfeasible and feasible areas
                .Bathymetry: the bathymetry flattened to its average value
                    for the wave case
        """
        Bathymetry = self.S_data.Bathymetry
        # Bathymetry = np.array([-10])
        # print(Bathymetry)
        if (len(Bathymetry) == 1
                and (not Bathymetry >= self.M_data.InstalDepth[0]
                     or not Bathymetry <= self.M_data.InstalDepth[1])):
            errStr = ('Error[InstalDepth]:\nThe device installation '
                      'constraints do not fit the bathymetry of the area.'
                      "\nNo possible installation area has been found!")
            raise ValueError(
                errStr
            )  # there is no valid zone for the installation of the devices

        elif (len(Bathymetry) == 1 and Bathymetry >= self.M_data.InstalDepth[0]
              and Bathymetry <= self.M_data.InstalDepth[1]):
            self.S_data.NogoAreas_bathymetry = None  # the whole lease area can be used
            if self.M_data.tidalFlag:
                X, Y = np.meshgrid(self.S_data.MeteoceanConditions['x'],
                                   self.S_data.MeteoceanConditions['y'])
                Z = X * 0. + Bathymetry  # generate a flat bathymetry
                self.S_data.Bathymetry = np.vstack(
                    (X.ravel(), Y.ravel(), Z.ravel())).T
        else:
            (NoGo, unfeasible_points_mask) = get_unfeasible_regions(
                Bathymetry,
                self.M_data.InstalDepth,
                debug=self.debug,
                debug_plot=self.debug_plot)
            self.S_data.NogoAreas_bathymetry = NoGo

        if not self.M_data.tidalFlag:
            module_logger.warning('[Warning] The wave module cannot run with '
                                  'variable bathymetry\n'
                                  'The bathymetry is reduced to its average '
                                  'value.')
            module_logger.info('The average bathymetry value is '
                               '{} m'.format(np.mean(Bathymetry[:, -1])))

            # calculate the average water depth within the lease area and
            # outside the nogo zones specified by the user
            active_area = Polygon(self.S_data.LeaseArea)
            # true for all points below the still water level (swl)
            mask_outofwater = Bathymetry[:, -1] <= 0
            # true for all valid (non-NaN) points
            mask_nan = np.logical_not(np.isnan(Bathymetry[:, -1]))
            mask_lease = np.asarray([
                active_area.intersects(Point(el)) for el in Bathymetry[:, :-1]
            ])  # true all points inside
            mask_nogo = np.ones(mask_lease.shape, dtype='bool')

            if self.S_data.NogoAreas:
                if not isinstance(self.S_data.NogoAreas, list):
                    raise IOError('The nogo areas input by the user needs to '
                                  'be a list of numpy.ndarray')
                for el in self.S_data.NogoAreas:
                    nogo_poly = Polygon(el)
                    t = np.asarray([
                        not nogo_poly.intersects(Point(grid_p))
                        for grid_p in Bathymetry[:, :-1]
                    ])
                    mask_nogo *= t  # true for all points outside the nogo areas

            bathymetry_mask = (
                mask_lease * mask_nan * mask_outofwater * mask_nogo
            )  # true only for points that satisfy all of the above conditions

            if Bathymetry[bathymetry_mask, -1].size == 0:
                raise ValueError("No valid points found within lease area. "
                                 "Check depths and exclusion zones")

            self.S_data._average_water_depth = np.mean(
                Bathymetry[bathymetry_mask, -1])

            return
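
# A standalone sketch of the masking pattern above (synthetic data,
# illustrative only): a valid point must lie inside the lease polygon, sit
# below the still water level, and be non-NaN; nogo areas are omitted here.
import numpy as np
from shapely.geometry import Point, Polygon

bathy = np.array([[0.0, 0.0, -5.0], [1.0, 1.0, -8.0], [9.0, 9.0, -3.0]])
lease = Polygon([(-1, -1), (2, -1), (2, 2), (-1, 2)])
mask = np.array([lease.intersects(Point(p)) for p in bathy[:, :2]])
mask &= bathy[:, -1] <= 0
mask &= ~np.isnan(bathy[:, -1])
print(np.mean(bathy[mask, -1]))  # -6.5, the average depth over valid points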
Example #25
 def setUp(self):
     self.geometry = Polygon([[0, 0], [0, 1], [1, 1], [0, 0]])
     self.field = BboxArrayField()
Example #26
def check_collision(rect0, rect1):
    p1 = Polygon(tuple(map(tuple, rect0)))
    p2 = Polygon(tuple(map(tuple, rect1)))
    collision = (p1.intersection(p2).area > 0.0)
    return collision
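
# Usage sketch: two axis-aligned rectangles given as 4x2 corner lists; they
# overlap on the unit square between (1, 1) and (2, 2), so this prints True.
r0 = [(0, 0), (2, 0), (2, 2), (0, 2)]
r1 = [(1, 1), (3, 1), (3, 3), (1, 3)]
print(check_collision(r0, r1))  # True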
Example #27
#width = 4e-3
#height = 4e-3
#rows = int(np.ceil((ymax-ymin) /  height))
#cols = int(np.ceil((xmax-xmin) / width))
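# NOTE (added): this excerpt assumes xmin, ymin, xmax, ymax, width, height,
# rows and cols were defined earlier in the original script, e.g. via the
# commented-out lines above.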

XleftOrigin = xmin
XrightOrigin = xmin + width
YtopOrigin = ymax
YbottomOrigin = ymax - height
polygons = []
for i in range(cols):
    Ytop = YtopOrigin
    Ybottom = YbottomOrigin
    for j in range(rows):
        polygons.append(
            Polygon([(XleftOrigin, Ytop), (XrightOrigin, Ytop),
                     (XrightOrigin, Ybottom), (XleftOrigin, Ybottom)]))
        Ytop = Ytop - height
        Ybottom = Ybottom - height
    XleftOrigin = XleftOrigin + width
    XrightOrigin = XrightOrigin + width

grid = gpd.GeoDataFrame({'geometry': polygons})
grid.crs = "EPSG:32610"  # dict-style {'init': ...} CRS is deprecated
grid = grid.to_crs(epsg=32610)  # no-op here: the grid is already in EPSG:32610

grid.to_file("grid.shp")
grid.to_file('grid.JSON', driver="GeoJSON")

Example #28
    def transform_coords(self):
        """ Sends polygon's coordinates to AI """

        figures = self.drawing_place.find_all()
        coords = []
        co = []
        tags = []
        if len(figures) >= 1:
            for fig in figures:
                tags.append(self.drawing_place.gettags(fig))
                co.append(utils.tuple_to_list(self.drawing_place.coords(fig)))
                coords.append(
                    utils.divide_coords(
                        utils.tuple_to_list(self.drawing_place.coords(fig)),
                        100))

            types = self.get_type_polygons()

            print(co)
            print(types)

            coordinates = []
            for shape in coords:
                poly = []
                for i in range(int(len(shape) / 2)):
                    poly.append(
                        [round(shape[i * 2], 2),
                         round(shape[i * 2 + 1], 2)])

                coordinates.append(poly)

            print(coordinates)

            polygons = []
            for shape in coordinates:
                polygons.append(Polygon(shape))

            # Output can be polygon OR multipolygon
            output = PolygonUtils.merge(polygons)
            print("Output:")
            print(output)

            multipolygon = []
            if not isinstance(output, Polygon):
                print("Multipolygon")
                # shapely 2.x: iterate via the .geoms accessor rather than
                # the geometry itself
                multipolygon = list(output.geoms)
            else:
                multipolygon = [output]
                print("Polygon")

            CanvasUtils.reset_canvas(self.drawing_place, self.labels)

            # Walk every polygon of the (possibly multi-part) output and
            # redraw it on the canvas
            for sub_ref in multipolygon:

                print(len(sub_ref.exterior.xy[0]))
                xy = []
                for i in range(len(sub_ref.exterior.xy[0])):
                    xy.append(sub_ref.exterior.xy[0][i] * 100)
                    xy.append(sub_ref.exterior.xy[1][i] * 100)

                CanvasPolygon(xy, 'black', "origin", self.drawing_place,
                              self.magnet_slider, False)
                print(xy)

            print(types)

            should_reset = SolvingThread.solve(output, types, self.window,
                                               self.progress)
            if should_reset:
                CanvasUtils.reset_canvas(self.drawing_place, self.labels)
                self.window.protocol("WM_DELETE_WINDOW", self.close_event)

            # utils.draw_node(shapes, state, ref)

        else:
            messagebox.showerror(
                "No polygons on canvas",
                "Please add at least one polygon before testing our AI")
Example #29
    def plan(self, start_point, goal_point, obstacle_list, animation=False):
        """Plans path from start to goal avoiding obstacles.

        Args:
            start_point: tuple with start point coordinates.
            goal_point: tuple with goal point coordinates.
            obstacle_list: list of obstacles which themselves are list of points
            animation: flag for showing planning visualization (default False)

        Returns:
            A list of points representing the path determined from
            start to goal while avoiding obstacles.
            A list containing just the start point means the path could not
            be planned.
        """

        # Initialize start and goal nodes
        start = Node.from_coordinates(start_point)
        goal_node = Node.from_coordinates(goal_point)

        # Initialize node_list with start
        node_list = [start]

        # Calculate distances between start and goal
        del_x, del_y = start.x - goal_node.x, start.y - goal_node.y
        distance_to_goal = math.sqrt(del_x**2 + del_y**2)

        # Loop to keep expanding the tree towards goal if there is no direct connection
        if check_intersection([start_point, goal_point], obstacle_list):
            while True:
                # Sample random point in specified area
                rnd_point = self.sampler(self.sample_area, goal_point,
                                         self.goal_sample_rate)

                # Find nearest node to the sampled point
                distance_list = [
                    (node.x - rnd_point[0])**2 + (node.y - rnd_point[1])**2
                    for node in node_list
                ]

                # Index of the nearest node (argmin over the squared
                # distances); the Python 2 xrange fallback is unnecessary on
                # Python 3
                nearest_node_index = distance_list.index(min(distance_list))

                nearest_node = node_list[nearest_node_index]

                # Create new point in the direction of sampled point
                theta = math.atan2(rnd_point[1] - nearest_node.y,
                                   rnd_point[0] - nearest_node.x)
                new_point = (
                    nearest_node.x + self.expand_dis * math.cos(theta),
                    nearest_node.y + self.expand_dis * math.sin(theta),
                )

                # Check whether the new point is inside an obstacle
                for obstacle in obstacle_list:
                    if Point(new_point).within(Polygon(obstacle)):
                        new_point = float("nan"), float("nan")
                        break  # no need to test the remaining obstacles

                # Expand tree
                if math.isnan(new_point[0]):
                    continue
                else:
                    new_node = Node.from_coordinates(new_point)
                    new_node.parent = nearest_node
                    node_list.append(new_node)

                # Check if goal has been reached or if there is direct connection to goal
                del_x, del_y = new_node.x - goal_node.x, new_node.y - goal_node.y
                distance_to_goal = math.sqrt(del_x**2 + del_y**2)

                if distance_to_goal < self.expand_dis or not check_intersection(
                    [new_node.to_tuple(),
                     goal_node.to_tuple()], obstacle_list):
                    goal_node.parent = node_list[-1]
                    node_list.append(goal_node)
                    print("Goal reached!")
                    break

        else:
            goal_node.parent = start
            node_list = [start, goal_node]

        # Construct path by traversing backwards through the tree
        path = []
        last_node = node_list[-1]

        while node_list[node_list.index(last_node)].parent is not None:
            node = node_list[node_list.index(last_node)]
            path.append(node.to_tuple())
            last_node = node.parent
        path.append(start.to_tuple())

        if animation is True:
            RRT.visualize_tree(node_list, obstacle_list)

        return list(reversed(path)), node_list
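
# The tree-expansion ("steer") step above in isolation: move expand_dis from
# the nearest node towards the sampled point (a self-contained sketch with
# hypothetical names).
import math

def steer(nearest, sample, expand_dis):
    theta = math.atan2(sample[1] - nearest[1], sample[0] - nearest[0])
    return (nearest[0] + expand_dis * math.cos(theta),
            nearest[1] + expand_dis * math.sin(theta))

print(steer((0.0, 0.0), (10.0, 0.0), 1.5))  # (1.5, 0.0)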
import numpy as np
from collections import defaultdict
from netCDF4 import Dataset
from scipy.interpolate import griddata

from shapely.geometry import Point, Polygon

from datetime import datetime

import pickle  # needed for the pickle.load call below

import pdb

import iris

# CMORPH is 0 to 360 longitude . . .

polygon = Polygon(((73., 21.), (83., 16.), (87., 22.), (75., 27.)))
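
# Quick sanity check of the domain polygon (added, illustrative): a point in
# the interior of the quadrilateral above.
print(polygon.contains(Point(80., 21.)))  # True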

pcp_dom, longitude_dom, latitude_dom, time_dom = pickle.load(
    open(
        '/nfs/a90/eepdw/Data/Saved_data/CMORPH/cmorph_emb_time_update_large.p',
        'rb'))

# Load land sea mask.  TRMM land sea mask is in % of water coverage so 100% is all water
#
#
#
#nc = Dataset('/nfs/a90/eepdw/Data/Observations/Satellite/TRMM/TMPA_mask.nc')

#lsm_lons, lsm_lats = np.meshgrid(nc.variables['lon'][:],nc.variables['lat'][:])

lsm_cube = iris.load_cube('/nfs/a90/eepdw/Data/EMBRACE/dkbh/dkbhu/30.pp',