Example #1
 def __add__(self, other):
     new_path = shgeo.LineString(chain(self.path.coords, other.path.coords))
     return RunningStitch(new_path, self.original_element)
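
A minimal sketch of the coordinate-chaining idiom used above, with two standalone LineStrings instead of RunningStitch objects (which are not shown here):

from itertools import chain
import shapely.geometry as shgeo

a = shgeo.LineString([(0, 0), (1, 0)])
b = shgeo.LineString([(1, 0), (2, 1)])
# chain() concatenates both coordinate sequences into one new path;
# the shared joint point is kept twice.
merged = shgeo.LineString(chain(a.coords, b.coords))
print(list(merged.coords))  # [(0.0, 0.0), (1.0, 0.0), (1.0, 0.0), (2.0, 1.0)]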
Example #2
def compute_polygon_section(baseline: Sequence[Tuple[int, int]],
                            boundary: Sequence[Tuple[int, int]], dist1: int,
                            dist2: int) -> List[Tuple[int, int]]:
    """
    Given a baseline, polygonal boundary, and two points on the baseline, return
    the rectangle formed by the orthogonal cuts on that baseline segment. The
    resulting polygon is not guaranteed to have a non-zero area.

    The distance can be larger than the actual length of the baseline if the
    baseline endpoints are inside the bounding polygon. In that case the
    baseline will be extrapolated to the polygon edge.

    Args:
        baseline (list): A polyline ((x1, y1), ..., (xn, yn))
        boundary (list): A bounding polygon around the baseline (same format as
                         baseline).
        dist1 (int): Absolute distance along the baseline of the first point.
        dist2 (int): Absolute distance along the baseline of the second point.

    Returns:
        A sequence of polygon points.
    """
    # find baseline segments the points are in
    if dist1 == 0:
        dist1 = np.finfo(float).eps
    if dist2 == 0:
        dist2 = np.finfo(float).eps
    boundary_pol = geom.Polygon(boundary)
    bl = np.array(baseline)
    # extend first/last segment of baseline if not on polygon boundary
    if boundary_pol.contains(geom.Point(bl[0])):
        logger.debug(
            f'Extending leftmost end of baseline {bl} to polygon boundary')
        l_point = boundary_pol.boundary.intersection(
            geom.LineString([(bl[0][0] - 10 * (bl[1][0] - bl[0][0]),
                              bl[0][1] - 10 * (bl[1][1] - bl[0][1])), bl[0]]))
        # intersection is incidental with boundary so take closest point instead
        if l_point.type != 'Point':
            bl[0] = np.array(
                nearest_points(geom.Point(bl[0]), boundary_pol)[1], 'int')
        else:
            bl[0] = np.array(l_point, 'int')
    if boundary_pol.contains(geom.Point(bl[-1])):
        logger.debug(
            f'Extending rightmost end of baseline {bl} to polygon boundary')
        r_point = boundary_pol.boundary.intersection(
            geom.LineString([(bl[-1][0] - 10 * (bl[-2][0] - bl[-1][0]),
                              bl[-1][1] - 10 * (bl[-2][1] - bl[-1][1])),
                             bl[-1]]))
        if r_point.type != 'Point':
            bl[-1] = np.array(
                nearest_points(geom.Point(bl[-1]), boundary_pol)[1], 'int')
        else:
            bl[-1] = np.array(r_point, 'int')
    dist1 = min(geom.LineString(bl).length - np.finfo(float).eps, dist1)
    dist2 = min(geom.LineString(bl).length - np.finfo(float).eps, dist2)
    dists = np.cumsum(np.diag(np.roll(squareform(pdist(bl)), 1)))
    segs_idx = np.searchsorted(dists, [dist1, dist2])
    segs = np.dstack((bl[segs_idx - 1], bl[segs_idx]))
    # compute unit vector of segments (NOT orthogonal)
    norm_vec = (segs[..., 1] - segs[..., 0])
    norm_vec_len = np.sqrt(np.sum(norm_vec**2, axis=1))
    unit_vec = norm_vec / np.tile(norm_vec_len, (2, 1)).T
    # find point start/end point on segments
    seg_dists = (dist1, dist2) - dists[segs_idx - 1]
    seg_points = segs[..., 0] + (seg_dists * unit_vec.T).T
    # get intersects
    bounds = np.array(boundary)
    try:
        points = [
            _test_intersect(point, uv[::-1], bounds).round()
            for point, uv in zip(seg_points, unit_vec)
        ]
    except ValueError:
        logger.debug('No intercepts with polygon (possibly misshaped polygon)')
        return seg_points.astype('int').tolist()
    o = np.int_(points[0]).reshape(-1, 2).tolist()
    o.extend(np.int_(np.roll(points[1], 2)).reshape(-1, 2).tolist())
    return o
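
The distance-to-point lookup above (cumulative segment lengths from the rolled distance matrix, `searchsorted`, then interpolation along the segment's unit vector) can be exercised in isolation. A minimal sketch using only numpy/scipy, independent of the surrounding helpers; the zero-distance case is excluded, which is why the function above clamps zero distances to machine epsilon:

import numpy as np
from scipy.spatial.distance import pdist, squareform

def point_at_distance(polyline, dist):
    """Return the point at `dist` (0 < dist <= total length) along `polyline`."""
    bl = np.array(polyline, dtype=float)
    # rolled diagonal of the full distance matrix = consecutive segment lengths
    dists = np.cumsum(np.diag(np.roll(squareform(pdist(bl)), 1)))
    seg_idx = np.searchsorted(dists, dist)
    start, end = bl[seg_idx - 1], bl[seg_idx]
    unit_vec = (end - start) / np.linalg.norm(end - start)
    return start + (dist - dists[seg_idx - 1]) * unit_vec

print(point_at_distance([(0, 0), (10, 0), (10, 10)], 15))  # -> [10.  5.]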
Example #3
def main():
    fig = plt.figure()
    # to get the effect of having just the states without a map "background"
    # turn off the background patch and axes frame
    ax = fig.add_axes([0, 0, 1, 1],
                      projection=ccrs.LambertConformal(),
                      frameon=False)
    ax.background_patch.set_visible(False)

    ax.set_extent([-125, -66.5, 20, 50], ccrs.Geodetic())

    shapename = 'admin_1_states_provinces_lakes_shp'
    states_shp = shpreader.natural_earth(resolution='110m',
                                         category='cultural',
                                         name=shapename)

    lons, lats = sample_data()

    ax.set_title('US States which intersect the track of '
                 'Hurricane Katrina (2005)')

    # turn the lons and lats into a shapely LineString
    track = sgeom.LineString(zip(lons, lats))

    # buffer the linestring by two degrees (note: this is a non-physical
    # distance)
    track_buffer = track.buffer(2)

    def colorize_state(geometry):
        facecolor = (0.9375, 0.9375, 0.859375)
        if geometry.intersects(track):
            facecolor = 'red'
        elif geometry.intersects(track_buffer):
            facecolor = '#FF7E00'
        return {'facecolor': facecolor, 'edgecolor': 'black'}

    ax.add_geometries(shpreader.Reader(states_shp).geometries(),
                      ccrs.PlateCarree(),
                      styler=colorize_state)

    ax.add_geometries([track_buffer],
                      ccrs.PlateCarree(),
                      facecolor='#C8A2C8',
                      alpha=0.5)
    ax.add_geometries([track],
                      ccrs.PlateCarree(),
                      facecolor='none',
                      edgecolor='k')

    # make two proxy artists to add to a legend
    direct_hit = mpatches.Rectangle((0, 0), 1, 1, facecolor="red")
    within_2_deg = mpatches.Rectangle((0, 0), 1, 1, facecolor="#FF7E00")
    labels = [
        'State directly intersects\nwith track',
        'State is within \n2 degrees of track'
    ]
    ax.legend([direct_hit, within_2_deg],
              labels,
              loc='lower left',
              bbox_to_anchor=(0.025, -0.1),
              fancybox=True)

    plt.show()
Example #4
def vectorize_lines(im: np.ndarray, threshold: float = 0.17, min_length=5):
    """
    Vectorizes lines from a binarized array.

    Args:
        im (np.ndarray): Array of shape (3, H, W) with the first dimension
                         being probabilities for (start_separators,
                         end_separators, baseline).
        threshold (float): Threshold for baseline blob detection.
        min_length (int): Minimum length of output baselines.

    Returns:
        [[x0, y0, ... xn, yn], [xm, ym, ..., xk, yk], ... ]
        A list of lists containing the points of all baseline polylines.
    """
    # split into baseline and separator map
    st_map = im[0]
    end_map = im[1]
    bl_map = im[2]
    bl_map = filters.sato(bl_map, black_ridges=False, mode='constant')
    bin_bl_map = bl_map > threshold
    # skeletonize
    line_skel = skeletonize(bin_bl_map)
    # find end points
    kernel = np.array([[1, 1, 1], [1, 10, 1], [1, 1, 1]])
    line_extrema = np.transpose(
        np.where(
            (convolve2d(line_skel, kernel, mode='same') == 11) * line_skel))

    mcp = LineMCP(~line_skel)
    try:
        mcp.find_costs(line_extrema)
    except ValueError:
        return []

    lines = [
        approximate_polygon(line, 3).tolist()
        for line in mcp.get_connections()
    ]
    # extend baselines to blob boundary
    lines = _extend_boundaries(lines, bin_bl_map)

    # orient lines
    f_st_map = maximum_filter(st_map, size=20)
    f_end_map = maximum_filter(end_map, size=20)

    oriented_lines = []
    for bl in lines:
        l_end = tuple(bl[0])
        r_end = tuple(bl[-1])
        if f_st_map[l_end] - f_end_map[l_end] > 0.2 and f_st_map[
                r_end] - f_end_map[r_end] < -0.2:
            pass
        elif f_st_map[l_end] - f_end_map[l_end] < -0.2 and f_st_map[
                r_end] - f_end_map[r_end] > 0.2:
            bl = bl[::-1]
        else:
            logger.debug(
                'Insufficient marker confidences in output. Defaulting to upright line.'
            )
        if bl[0][1] > bl[-1][1]:
            bl = bl[::-1]
        if geom.LineString(bl).length >= min_length:
            oriented_lines.append([x[::-1] for x in bl])
    return oriented_lines
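
The endpoint detection above works because the weighted kernel assigns 10 to a skeleton pixel and 1 to each 8-connected skeleton neighbour, so the convolution equals exactly 11 at pixels with a single neighbour, i.e. at line ends. A minimal sketch of just that step on a toy skeleton:

import numpy as np
from scipy.signal import convolve2d

# toy skeleton: a single horizontal run of four pixels
line_skel = np.zeros((5, 6), dtype=int)
line_skel[2, 1:5] = 1

kernel = np.array([[1, 1, 1], [1, 10, 1], [1, 1, 1]])
# 11 == centre pixel (10) plus exactly one neighbour (1)
line_extrema = np.transpose(
    np.where((convolve2d(line_skel, kernel, mode='same') == 11) * line_skel))
print(line_extrema)  # endpoints (2, 1) and (2, 4)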
Example #5
def calculate_polygonal_environment(
        im: PIL.Image.Image = None,
        baselines: Sequence[Sequence[Tuple[int, int]]] = None,
        suppl_obj: Sequence[Sequence[Tuple[int, int]]] = None,
        im_feats: np.ndarray = None,
        scale: Tuple[int, int] = None,
        topline: bool = False):
    """
    Given a list of baselines and an input image, calculates a polygonal
    environment around each baseline.

    Args:
        im (PIL.Image): grayscale input image (mode 'L')
        baselines (sequence): List of lists containing a single baseline per
                              entry.
        suppl_obj (sequence): List of lists containing additional polylines
                              that should be considered hard boundaries for
                              polygonization purposes. Can be used to prevent
                              polygonization into non-text areas such as
                              illustrations or to compute the polygonization of
                              a subset of the lines in an image.
        im_feats (numpy.array): An optional precomputed seamcarve energy map.
                                Overrides data in `im`. The default map is
                                `gaussian_filter(sobel(im), 2)`.
        scale (tuple): A 2-tuple (h, w) containing optional scale factors of
                       the input. Values of 0 are used for aspect-preserving
                       scaling. `None` skips input scaling.
        topline (bool): Switch to change default baseline location for offset
                        calculation purposes. If set to False, baselines are
                        assumed to be on the bottom of the text line and will
                        be offset upwards, if set to True, baselines are on the
                        top and will be offset downwards. If set to None, no
                        offset will be applied.
    Returns:
        List of lists of coordinates. If no polygonization could be computed for
        a baseline, `None` is returned instead.
    """
    if scale is not None and (scale[0] > 0 or scale[1] > 0):
        w, h = im.size
        oh, ow = scale
        if oh == 0:
            oh = int(h * ow / w)
        elif ow == 0:
            ow = int(w * oh / h)
        im = im.resize((ow, oh))
        scale = np.array((ow / w, oh / h))
        # rescale baselines
        baselines = [(np.array(bl) * scale).astype('int').tolist()
                     for bl in baselines]
        # rescale suppl_obj
        if suppl_obj is not None:
            suppl_obj = [(np.array(bl) * scale).astype('int').tolist()
                         for bl in suppl_obj]

    if im_feats is None:
        bounds = np.array(im.size, dtype=float) - 1
        im = np.array(im.convert('L'))
        # compute image gradient
        im_feats = gaussian_filter(sobel(im), 0.5)
    else:
        bounds = np.array(im_feats.shape[::-1], dtype=float) - 1

    polygons = []
    if suppl_obj is None:
        suppl_obj = []

    for idx, line in enumerate(baselines):
        try:
            end_points = (line[0], line[-1])
            line = geom.LineString(line)
            offset = default_specs.SEGMENTATION_HYPER_PARAMS[
                'line_width'] if topline is not None else 0
            offset_line = line.parallel_offset(
                offset, side='left' if topline else 'right')
            line = np.array(line, dtype=float)
            offset_line = np.array(offset_line, dtype=float)

            # parallel_offset on the right reverses the coordinate order
            if not topline:
                offset_line = offset_line[::-1]
            # calculate magnitude-weighted average direction vector
            lengths = np.linalg.norm(np.diff(line.T), axis=0)
            p_dir = np.mean(np.diff(line.T) * lengths / lengths.sum(), axis=1)
            p_dir = (p_dir.T / np.sqrt(np.sum(p_dir**2, axis=-1)))

            env_up, env_bottom = _calc_roi(
                line, bounds, baselines[:idx] + baselines[idx + 1:], suppl_obj,
                p_dir)

            polygons.append(
                _extract_patch(env_up, env_bottom, line.astype('int'),
                               offset_line.astype('int'), end_points, p_dir,
                               topline, offset, im_feats))
        except Exception as e:
            logger.warning(f'Polygonizer failed on line {idx}: {e}')
            polygons.append(None)

    if scale is not None:
        polygons = [
            (np.array(pol) /
             scale).astype('uint').tolist() if pol is not None else None
            for pol in polygons
        ]
    return polygons
Example #6
    def step(self, agent, world):
        if self.torus:
            points_center = np.vstack(
                [world.agent_states[:, 0:2], self.state.p_pos])
            pursuers_down_right = np.hstack([
                world.agent_states[:, 0:1] + world.world_size,
                world.agent_states[:, 1:2]
            ])
            pursuers_up_left = np.hstack([
                world.agent_states[:, 0:1],
                world.agent_states[:, 1:2] + world.world_size
            ])
            pursuers_up_right = np.hstack([
                world.agent_states[:, 0:1] + world.world_size,
                world.agent_states[:, 1:2] + world.world_size
            ])
            evader_down_right = np.hstack([
                self.state.p_pos[0:1] + world.world_size, self.state.p_pos[1:2]
            ])
            evader_up_left = np.hstack([
                self.state.p_pos[0:1], self.state.p_pos[1:2] + world.world_size
            ])
            evader_up_right = np.hstack([
                self.state.p_pos[0:1] + world.world_size,
                self.state.p_pos[1:2] + world.world_size
            ])
            points_down_right = np.hstack([
                points_center[:, 0:1] + world.world_size, points_center[:, 1:2]
            ])
            points_up_left = np.hstack([
                points_center[:, 0:1], points_center[:, 1:2] + world.world_size
            ])
            points_up_right = np.hstack([
                points_center[:, 0:1] + world.world_size,
                points_center[:, 1:2] + world.world_size
            ])

            nodes = np.vstack([
                world.agent_states[:, 0:2], pursuers_down_right,
                pursuers_up_left, pursuers_up_right, self.state.p_pos,
                evader_down_right, evader_up_left, evader_up_right
            ])

            dist_matrix_full = U.get_euclid_distances(nodes)

            quadrant_check = np.sign(self.state.p_pos - world.world_size / 2)
            if np.all(quadrant_check == np.array([1, 1])):
                evader_quadrant = 0
            elif np.all(quadrant_check == np.array([-1, 1])):
                evader_quadrant = 1
            elif np.all(quadrant_check == np.array([1, -1])):
                evader_quadrant = 2
            elif np.all(quadrant_check == np.array([-1, -1])):
                evader_quadrant = 3

            evader_dist = dist_matrix_full[:-4, -4 + evader_quadrant]
            sub_list = list(np.where(evader_dist < self.obs_radius)[0])
            if len(sub_list) > 10:
                sub_list = list(np.argsort(evader_dist)[0:10])
            sub_list.append(4 * world.nr_agents + evader_quadrant)
            evader_sub = len(sub_list) - 1
            closest_pursuer = np.where(evader_dist == evader_dist.min())[0]

            nodes_center_sub = nodes[sub_list, :]
            nodes_left = np.copy(nodes_center_sub)
            nodes_left[:, 0] = self.bounding_box[0] - (nodes_left[:, 0] -
                                                       self.bounding_box[0])
            nodes_right = np.copy(nodes_center_sub)
            nodes_right[:, 0] = self.bounding_box[1] + (self.bounding_box[1] -
                                                        nodes_right[:, 0])
            nodes_down = np.copy(nodes_center_sub)
            nodes_down[:, 1] = self.bounding_box[2] - (nodes_down[:, 1] -
                                                       self.bounding_box[2])
            nodes_up = np.copy(nodes_center_sub)
            nodes_up[:, 1] = self.bounding_box[3] + (self.bounding_box[3] -
                                                     nodes_up[:, 1])

            points = np.vstack([
                nodes_center_sub, nodes_down, nodes_left, nodes_right, nodes_up
            ])

        else:
            nodes = np.vstack([
                world.agent_states[:, 0:2],
                self.state.p_pos,
            ])
            distances = U.get_euclid_distances(nodes)
            evader_dist = distances[-1, :-1]
            closest_pursuer = np.where(evader_dist == evader_dist.min())[0]
            sub_list = list(np.where(evader_dist < self.obs_radius)[0])
            if len(sub_list) > 10:
                sub_list = list(np.argsort(evader_dist)[0:10])
            sub_list.append(world.nr_agents)
            evader_sub = len(sub_list) - 1

            nodes_center_sub = nodes[sub_list, :]
            nodes_left = np.copy(nodes_center_sub)
            nodes_left[:, 0] = self.bounding_box[0] - (nodes_left[:, 0] -
                                                       self.bounding_box[0])
            nodes_right = np.copy(nodes_center_sub)
            nodes_right[:, 0] = self.bounding_box[1] + (self.bounding_box[1] -
                                                        nodes_right[:, 0])
            nodes_down = np.copy(nodes_center_sub)
            nodes_down[:, 1] = self.bounding_box[2] - (nodes_down[:, 1] -
                                                       self.bounding_box[2])
            nodes_up = np.copy(nodes_center_sub)
            nodes_up[:, 1] = self.bounding_box[3] + (self.bounding_box[3] -
                                                     nodes_up[:, 1])

            points = np.vstack([
                nodes_center_sub, nodes_down, nodes_left, nodes_right, nodes_up
            ])

        vor = ssp.Voronoi(points)

        d = np.zeros(2)

        for i, ridge in enumerate(vor.ridge_points):
            if evader_sub in set(ridge) and np.all(
                [r <= evader_sub for r in ridge]):
                if self.torus:
                    neighbor = min([sub_list[r] for r in ridge])
                else:
                    # neighbor = min(ridge)
                    neighbor = min([sub_list[r] for r in ridge])

                if neighbor in closest_pursuer:
                    ridge_inds = vor.ridge_vertices[i]
                    a = vor.vertices[ridge_inds[0], :]
                    b = vor.vertices[ridge_inds[1], :]

                    line_of_control = b - a
                    L_i = np.linalg.norm(line_of_control)

                    if self.torus:
                        xi = nodes[neighbor, :] - nodes[4 * world.nr_agents +
                                                        evader_quadrant]
                    else:
                        xi = nodes[neighbor, :] - self.state.p_pos
                    eta_h_i = xi / np.linalg.norm(xi)
                    eta_v_i = np.array([-eta_h_i[1], eta_h_i[0]])

                    if self.torus:
                        line1 = sg.LineString([
                            nodes[4 * world.nr_agents + evader_quadrant],
                            nodes[neighbor, :]
                        ])
                    else:
                        line1 = sg.LineString(
                            [self.state.p_pos, nodes[neighbor, :]])
                    line2 = sg.LineString([a, b])
                    intersection = line1.intersection(line2)

                    if not intersection.is_empty:
                        inter_point = np.hstack(intersection.xy)

                        if np.dot(line_of_control, eta_v_i.flatten()) > 0:
                            l_i = np.linalg.norm(a - inter_point)
                        else:
                            l_i = np.linalg.norm(b - inter_point)
                    else:
                        if np.dot(line_of_control, eta_v_i.flatten()) > 0:
                            l_i = 0
                        else:
                            l_i = L_i

                    alpha_h_i = -L_i / 2
                    alpha_v_i = (l_i**2 -
                                 (L_i - l_i)**2) / (2 * np.linalg.norm(xi))

                    d = (alpha_h_i * eta_h_i - alpha_v_i *
                         eta_v_i) / np.sqrt(alpha_h_i**2 + alpha_v_i**2)

        assert ('d' in locals())

        return d
Example #7
def adjust_text_radial_plus_repulsion(texts,
                                      alpha=0.5,
                                      ax=None,
                                      add_line=True,
                                      origin=(0, 0),
                                      n_iter_max=15,
                                      tol=1e-4,
                                      min_line_length=0.05,
                                      lineprops=None,
                                      draw_below_intersection_prop=0.1,
                                      constrain_to_ax_limits=False):
    # TODO: would like to add repulsion between text and points, too
    """

    :param texts:
    :param alpha:
    :param add_line: If True (default), add lines to show where the annotations point. If False, this overrides any
    other option relating to the appearance of lines.
    :param origin:
    :param n_iter_max:
    :param tol: The value of the L1 norm of all overlap vectors at which we consider the job done. This will depend upon
    the scale of the plot. TODO: could select it automatically based on a heuristic?
    :param min_line_length: The minimum length of a line, below which it isn't plotted. This is overridden by
    only_draw_if_non_intersecting.
    :param draw_below_intersection_prop: Don't draw lines when the proportion of the new text bounding box overlapping
    with the old is below this value. Set to None or 1. to disable this option. Set to 0 if any intersection at all
    should prevent line drawing.
    :return:
    """
    if add_line and lineprops is None:
        lineprops = {
            'color': 'black',
            'linewidth': 1.,
        }

    if ax is None:
        ax = plt.gca()

    if draw_below_intersection_prop is not None and draw_below_intersection_prop >= 1:
        draw_below_intersection_prop = None

    if add_line and draw_below_intersection_prop is not None:
        r = get_renderer(ax.get_figure())
        bboxes_orig = get_bboxes(texts, r, ax=ax)

    # record original positions
    orig_pos = [t.get_position() for t in texts]
    dx = None
    dy = None
    converged = False

    for i in range(n_iter_max):
        this_dx, this_dy, q = rearrange_text_radially(
            texts,
            alpha=alpha,
            origin=origin,
            constrain_to_ax_limits=constrain_to_ax_limits)

        if dx is None:
            dx = this_dx
        else:
            dx += this_dx
        if dy is None:
            dy = this_dy
        else:
            dy += this_dy

        if sum(q) < tol:
            logger.info("Rearrange text converged after %d iterations.",
                        (i + 1))
            converged = True
            break

    if not converged:
        logger.warning("Failed to converge after %d iterations", n_iter_max)

    if add_line:
        if draw_below_intersection_prop is not None:
            bboxes = get_bboxes(texts, r, ax=ax)

        for i, (xy, ddx, ddy) in enumerate(zip(orig_pos, dx, dy)):
            # check (1): is the line long enough to be worth drawing?
            if np.abs(np.array([ddx, ddy])).sum() < min_line_length:
                continue

            # check (2): is the new location still intersecting the original position?
            if draw_below_intersection_prop is not None:
                this_bbox = bboxes[i]
                this_bbox_orig = bboxes_orig[i]

                if draw_below_intersection_prop == 0 and this_bbox.overlaps(
                        this_bbox_orig):
                    continue
                else:
                    overlap = this_bbox.intersection(this_bbox, this_bbox_orig)
                    if overlap is not None:
                        a0 = this_bbox.height * this_bbox.width
                        a1 = overlap.height * overlap.width
                        if (a1 / a0) > draw_below_intersection_prop:
                            continue

            if SHAPELY_PRESENT:
                this_bbox = bboxes[i]
                # central coordinates
                cx, cy = get_midpoint(this_bbox)
                line = geometry.LineString([xy, [cx, cy]])
                x0, y0, w, h = this_bbox.bounds
                rect = geometry.Polygon([[x0, y0], [x0, y0 + h],
                                         [x0 + w, y0 + h], [x0 + w, y0],
                                         [x0, y0]])
                if not rect.intersects(line):
                    # why would this happen? rounding error?
                    # use the default anchor position
                    new_x = xy[0] + ddx
                    new_y = xy[1] + ddy
                else:
                    iline = rect.intersection(line)
                    # decide which intersection to use
                    dd = dict([(geometry.LineString([xy, u]).length, u)
                               for u in iline.coords])
                    new_x, new_y = dd[min(dd.keys())]

            else:
                logger.warning(
                    "No shapely library present; lines will be drawn from the default anchor point."
                )
                new_x = xy[0] + ddx
                new_y = xy[1] + ddy

            ax.plot([xy[0], new_x], [xy[1], new_y], **lineprops)

    return dx, dy, (i + 1)
Example #8
def plot_modeloutput_map(gdirs,
                         ax=None,
                         smap=None,
                         model=None,
                         vmax=None,
                         linewidth=3,
                         filesuffix='',
                         modelyr=None):
    """Plots the result of the model output."""

    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]

    # Dirty optim
    try:
        smap.set_topography(topo)
    except ValueError:
        pass

    toplot_th = np.array([])
    toplot_lines = []
    toplot_crs = []

    if model is None:
        models = []
        for gdir in gdirs:
            model = FileModel(
                gdir.get_filepath('model_geometry', filesuffix=filesuffix))
            model.run_until(modelyr)
            models.append(model)
    else:
        models = utils.tolist(model)
    for gdir, model in zip(gdirs, models):
        geom = gdir.read_pickle('geometries')
        poly_pix = geom['polygon_pix']

        crs = gdir.grid.center_grid
        smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2, linewidth=.2)

        poly_pix = utils.tolist(poly_pix)
        for _poly in poly_pix:
            for l in _poly.interiors:
                smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

        # plot Centerlines
        cls = model.fls
        for l in cls:
            smap.set_geometry(l.line,
                              crs=crs,
                              color='gray',
                              linewidth=1.2,
                              zorder=50)
            toplot_th = np.append(toplot_th, l.thick)
            widths = l.widths.copy()
            widths = np.where(l.thick > 0, widths, 0.)
            for wi, cur, (n1, n2) in zip(widths, l.line.coords, l.normals):
                line = shpg.LineString([
                    shpg.Point(cur + wi / 2. * n1),
                    shpg.Point(cur + wi / 2. * n2)
                ])
                toplot_lines.append(line)
                toplot_crs.append(crs)

    dl = salem.DataLevels(cmap=OGGM_CMAPS['section_thickness'],
                          data=toplot_th,
                          vmin=0,
                          vmax=vmax)
    colors = dl.to_rgb()
    for l, c, crs in zip(toplot_lines, colors, toplot_crs):
        smap.set_geometry(l, crs=crs, color=c, linewidth=linewidth, zorder=50)
    smap.plot(ax)
    return dict(cbar_label='Section thickness [m]',
                cbar_primitive=dl,
                title_comment=' -- year: {:d}'.format(np.int64(model.yr)))
Example #9
def find_subpath(start, end, last_dis, last_path, ob_dis):
    # global st.obs
    # global st.global_sub_min
    # global st.global_sub_path
    global keep
    line = ge.LineString([start, end])
    dis = float('inf')
    first_ob = None
    for ob in st.obs:
        if line.crosses(ob):
            dis_ = start.distance(ob)
            if dis_ == ob_dis:
                if end.intersects(ob):
                    sumdis = 0
                    ob_path = []
                    ext_ob = ob.exterior.coords[:]
                    for i in range(len(ext_ob)):
                        if ext_ob[i] == start.coords:
                            start = i
                            p2 = ge.Point(ext_ob[i])
                            while ext_ob[i] != end.coords:
                                i += 1
                                p1 = p2
                                p2 = ge.Point(ext_ob[i])
                                sumdis += p1.distance(ge.Point(p2))
                                ob_path.append(ge.LineString([p1, p2]))
                            end = i
                            if ob.length < 2 * sumdis:
                                sumdis = ob.length - sumdis
                                ob_path = []
                                for j in range(start, 0, -1):
                                    ob_path.append(
                                        ge.LineString(
                                            [ext_ob[i], ext_ob[i - 1]]))
                                for j in range(len(ext_ob) - 1, end, -1):
                                    ob_path.append(
                                        ge.LineString(
                                            [ext_ob[i], ext_ob[i - 1]]))
                            print('return ob_path', ob_path)
                            return sumdis, ob_path

            if dis_ < dis and dis_ != ob_dis and (not end.within(ob)):
                # print('in',dis,ob_dis)
                first_ob = ob
                dis = dis_
    # print('\nfirst_ob',first_ob)
    if not first_ob:
        return start.distance(end), [line]
    else:
        # dis+=last_dis
        new_convex = ge.Polygon(
            list(first_ob.exterior.coords[:]) +
            [(start.x, start.y), (end.x, end.y)]).convex_hull
        # new_starts_ = list(new_convex.exterior.intersection(first_ob.exterior))[0]
        # print('new_starts_',new_starts_)
        # print('start',start)
        # print('new_convex',new_convex)
        # print('first_ob',first_ob)
        new_starts = []
        ext = new_convex.exterior.coords[:]
        ext_2 = first_ob.exterior.coords[:]
        index = ext.index((end.x, end.y))
        for i in range(index - 1, -1, -1):
            if ext[i] in ext_2:
                new_starts.append(ge.Point(ext[i]))
                break
        for i in range(index + 1, index + len(ext)):
            if i >= len(ext):
                i -= len(ext)
            if ext[i] in ext_2:
                new_starts.append(ge.Point(ext[i]))
                break
        # print(new_starts_)
        # keep=new_starts_
        # for points in list(new_starts_):
        #     points_set=[ge.Point(coord) for coord in list(points.coords)]
        #     for p in points_set:
        #         if not p.equals(start):
        #             new_starts.append(p)
        # else:
        # print(points)
        # print('pre',new_starts,'\n\n\n')
        # print('new_starts',new_starts)
        next_path_real = None
        for new_start in new_starts:
            # print(line,'\n',first_ob,'\n',new_convex,'\n',new_starts,'\n',dis,'\n',ob_dis,'\n',first_ob,'\n\n')
            this_dis, this_path = find_subpath(start, new_start, 0, [], dis)
            # print(this_dis)
            dis_pre = last_dis + this_dis
            path_pre = last_path + this_path
            if dis_pre + new_start.distance(
                    end
            ) < st.global_sub_min - 0.005:  # can add an offset someproblem in this prun,should put in more argument
                # print('sub',new_start,start)
                next_dis, next_path = find_subpath(
                    new_start, end, dis_pre, path_pre, float('inf')
                )  # error! should check (start new_start) cross st.obstacle
                if dis_pre + next_dis < st.global_sub_min:
                    st.global_sub_min = dis_pre + next_dis
                    st.global_sub_path = path_pre + next_path
                    next_path_real = next_path
                    next_start = new_start
        if not next_path_real:
            return float('inf'), []
        else:
            return st.global_sub_min - last_dis, [
                ge.LineString([start, next_start])
            ] + next_path_real
Example #10
def build_db(geoId):
    api = overpass.API(timeout=10000)

    # New Hampshire and some surrounding areas
    BOUNDING_BOX = '43.12103377575541,-73.7237548828125,44.797428998555674,-69.378662109375'

    #  Boyne Highlands
    # BOUNDING_BOX = '45,-85,46,-84'

    if geoId:
        BOUNDING_BOX = BB_LIST[int(geoId)]

    # Snowbird/Alta ... Let's see.
    # BOUNDING_BOX = '40.5, -111.74, 40.601, -11.58'

    # Park City
    # BOUNDING_BOX = '40.5, -111.62, 40.8, -111.244'

    # Core of White Mountains
    #BOUNDING_BOX = '44.00269350325321,-71.64871215820312,44.36067856998804,-71.12411499023438'

    # All of NE US/CAN
    #BOUNDING_BOX = '37.70120736474139,-81.7822265625,49.23912083246698,-64.40185546874999'

    # response = api.Get('node["name"="Salt Lake City"]')
    # response = api.Get('way["piste:type"~"downhill|yes"](44.00269350325321,-71.64871215820312,44.36067856998804,-71.12411499023438)')
    print('getting trails')
    trail_response = api.Get(build_query('way["piste:type"~"downhill|yes"](', BOUNDING_BOX), 'json')

    print('getting lifts')
    lift_response = api.Get(build_query('way["aerialway"~""](',BOUNDING_BOX), 'json')

    print('getting ski areas')
    ski_area_response = api.Get(build_query('way["landuse"~"winter_sports"](', BOUNDING_BOX), 'json')

    ski_areas = []
    trails = []
    lifts = []
    print('storing ski areas')
    for sa in ski_area_response.features:

        exists = SkiArea.query.filter_by(osm_id=sa['id'])
        if exists.count():
            #TODO: Allow ski area updates to process
            continue

        sam = SkiArea()
        sam.name = sa['properties'].get('name')
        if not sam.name:
            continue
        sam.osm_id = sa['id']
        sam.boundary = str(geometry.Polygon(sa['geometry']['coordinates']))
        ski_areas.append(sam)
        db.session.add(sam)

    db.session.commit()

    print('storing trails')
    for trail in trail_response.features:

        if trail['properties'].get('name') is None:
            continue
        exists = Trail.query.filter_by(osm_id=trail['id'])
        if exists.count():
            new_trail = False
            t = exists[0]
        else:
            new_trail = True
            t = Trail()

        t.name = trail['properties'].get('name')
        t.difficulty = trail['properties'].get('piste:difficulty').lower() if 'piste:difficulty' in trail['properties'] else None
        t.osm_id = trail['id']
        t.path = str(geometry.LineString(trail['geometry']['coordinates']))

        if new_trail:
            db.session.add(t)
        db.session.commit()

        if not new_trail:
            # Remaining steps only apply to new trails
            continue

        # Determine which ski area this trail is a part of.
        # results = db.session.query(SkiArea).filter(SkiArea.boundary.ST_Contains(geoalchemy2.WKBElement(geometry.LineString(trail['geometry']['coordinates']))))
        results = db.session.query(SkiArea).filter(SkiArea.boundary.ST_Intersects(t.path))
        if results.count() > 1:
            print('way ' + str(trail['id']) + ' is confused')

        if results.count() > 0:
            t.ski_area_id = results[0].id
            continue

        # if results.count() != 1:
        results = find_nearest_ski_area(t, api)

        if results:
            t.ski_area_id = results
            continue

        # t.ski_area = sam
        db.session.commit()

    db.session.commit()

    print('storing lifts')
    for lift in lift_response.features:

        exists = Lift.query.filter_by(osm_id=lift['id'])
        is_update = exists.count()

        if not is_update:
            lt = Lift()
        else:
            lt = exists[0]
        lt.name = lift['properties'].get('name')
        lt.type = lift['properties'].get('aerialway')
        lt.osm_id = lift['id']
        lt.path = str(geometry.LineString(lift['geometry']['coordinates']))
        lt.occupancy = lift['properties'].get('aerialway:occupancy', lift['properties'].get('capacity'))

        if not is_update:
            db.session.add(lt)

        db.session.commit()

        if is_update:
            continue

        results = db.session.query(SkiArea).filter(SkiArea.boundary.ST_Contains(lt.path))
        if results.count() == 1:

            lt.ski_area_id = results[0].id
            continue

        results = find_nearest_ski_area(lt, api)
        if results:
            lt.ski_area_id = results

    db.session.commit()

    # Clean up results.

    # Cycle through unaffiliated lifts and trails. Attempt to affiliate.
    # Continue to cycle until all lifts and trails have affiliated or iterating isn't helping.
    lifts = Lift.query.filter_by(ski_area_id = None)
    num_unaffiliated_lifts = lifts.count()
    trails = Trail.query.filter_by(ski_area_id = None)
    num_unaffiliated_trails = trails.count()

    while num_unaffiliated_lifts > 0 and num_unaffiliated_trails > 0:
        print(num_unaffiliated_lifts)

        for lt in lifts:

            result = find_nearest_ski_area(lt)
            if result:
                lt.ski_area_id = result
                db.session.commit()

        for t in trails:

            result = find_nearest_ski_area(t)
            if result:
                t.ski_area_id = result
                db.session.commit()

        lifts = Lift.query.filter_by(ski_area_id = None)
        trails = Trail.query.filter_by(ski_area_id = None)
        if lifts.count() == num_unaffiliated_lifts and trails.count() == num_unaffiliated_trails:
            break

        num_unaffiliated_lifts = lifts.count()
        num_unaffiliated_trails = trails.count()

    # Associates ski areas with states
    # TODO: Rename all of this.
    update_ski_areas.update()

print('done')
Example #11
def plot_catchment_width(gdirs,
                         ax=None,
                         smap=None,
                         corrected=False,
                         add_intersects=False,
                         add_touches=False,
                         lines_cmap='Set1'):
    """Plots the catchment widths out of a glacier directory.
    """

    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]
    # Dirty optim
    try:
        smap.set_topography(topo)
    except ValueError:
        pass

    # Maybe plot touches
    xis, yis, cis = [], [], []
    ogrid = smap.grid

    for gdir in gdirs:
        crs = gdir.grid.center_grid
        geom = gdir.read_pickle('geometries')

        # Plot boundaries
        poly_pix = geom['polygon_pix']
        smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2, linewidth=.2)
        for l in poly_pix.interiors:
            smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

        # Plot intersects
        if add_intersects and gdir.has_file('intersects'):
            gdf = gdir.read_shapefile('intersects')
            smap.set_shapefile(gdf, color='k', linewidth=3.5, zorder=3)

        # plot Centerlines
        cls = gdir.read_pickle('inversion_flowlines')[::-1]
        color = gencolor(len(cls) + 1, cmap=lines_cmap)
        for l, c in zip(cls, color):
            smap.set_geometry(l.line,
                              crs=crs,
                              color=c,
                              linewidth=2.5,
                              zorder=50)
            if corrected:
                for wi, cur, (n1, n2) in zip(l.widths, l.line.coords,
                                             l.normals):
                    _l = shpg.LineString([
                        shpg.Point(cur + wi / 2. * n1),
                        shpg.Point(cur + wi / 2. * n2)
                    ])

                    smap.set_geometry(_l,
                                      crs=crs,
                                      color=c,
                                      linewidth=0.6,
                                      zorder=50)
            else:
                for wl, wi in zip(l.geometrical_widths, l.widths):
                    col = c if np.isfinite(wi) else 'grey'
                    for w in wl:
                        smap.set_geometry(w,
                                          crs=crs,
                                          color=col,
                                          linewidth=0.6,
                                          zorder=50)

            if add_touches:
                pok = np.where(l.is_rectangular)
                xi, yi = l.line.xy
                xi, yi = ogrid.transform(np.asarray(xi)[pok],
                                         np.asarray(yi)[pok],
                                         crs=crs)
                xis.append(xi)
                yis.append(yi)
                cis.append(c)

    smap.plot(ax)
    for xi, yi, c in zip(xis, yis, cis):
        ax.scatter(xi, yi, color=c, s=20, zorder=51)

    return {}
Example #12
def segsplit(s1, s2, tol=0.0001, alpha=0.1):
    """ split segment

    Parameters
    ----------

    s1    : shapely LineString
    s2    : shapely LineString
    tol   : tolerance for point equality test
    alpha : stretching factor

    Returns
    -------
    ts   : list of segment
    bks1 : boolean keep s1
    bks2 : boolean keep s2


    Examples
    --------

    >>> s1 = shg.LineString(((0,0),(1,0)))
    >>> s2 = shg.LineString(((1,0),(2,0)))
    >>> s3 = shg.LineString(((1,-10),(1,10)))
    >>> s4 = shg.LineString(((0.5,-10),(0.5,10)))
    >>> ts1 = segsplit(s1,s2)
    >>> ts2 = segsplit(s1,s3)
    >>> ts3 = segsplit(s1,s4)

    """
    ts1 = []
    ts2 = []
    bks1 = True
    bks2 = True
    beta = alpha / (2 * alpha + 1)
    if s1.intersects(s2):
        p1t, p1h = s1.boundary
        p2t, p2h = s2.boundary
        ls1 = s1.length
        ls2 = s2.length
        pi = s1.intersection(s2)
        if s1.touches(s2):  # touching segments
            if not (pi.equals_exact(p1t, tol) or pi.equals_exact(p1h, tol)):
                s11 = shg.LineString(
                    ((p1t.xy[0][0], p1t.xy[1][0]), (pi.xy[0][0], pi.xy[1][0])))
                s12 = shg.LineString(
                    ((pi.xy[0][0], pi.xy[1][0]), (p1h.xy[0][0], p1h.xy[1][0])))
                if s11.length > 0 and s11.length >= alpha:
                    ts1.append(s11)
                if s12.length > 0 and s12.length >= alpha:
                    ts1.append(s12)
                bks1 = False
            if not (pi.equals_exact(p2t, tol) or pi.equals_exact(p2h, tol)):
                s21 = shg.LineString(
                    ((p2t.xy[0][0], p2t.xy[1][0]), (pi.xy[0][0], pi.xy[1][0])))
                s22 = shg.LineString(
                    ((pi.xy[0][0], pi.xy[1][0]), (p2h.xy[0][0], p2h.xy[1][0])))
                if s21.length > 0 and s21.length > alpha:
                    ts2.append(s21)
                if s22.length > 0 and s22.length >= alpha:
                    ts2.append(s22)
                bks2 = False
        else:  # crossing segments
            s11 = shg.LineString(
                ((p1t.xy[0][0], p1t.xy[1][0]), (pi.xy[0][0], pi.xy[1][0])))
            s12 = shg.LineString(
                ((pi.xy[0][0], pi.xy[1][0]), (p1h.xy[0][0], p1h.xy[1][0])))
            s21 = shg.LineString(
                ((p2t.xy[0][0], p2t.xy[1][0]), (pi.xy[0][0], pi.xy[1][0])))
            s22 = shg.LineString(
                ((pi.xy[0][0], pi.xy[1][0]), (p2h.xy[0][0], p2h.xy[1][0])))
            ls11 = s11.length
            ls12 = s12.length
            ls21 = s21.length
            ls22 = s22.length

            if ls11 > ls12:
                ts1.append(s11)
            else:
                ts1.append(s12)

            if ls21 > ls22:
                ts2.append(s21)
            else:
                ts2.append(s22)
            # if s11.length>0 and s11.length>=alpha:
            #    ts1.append(s11)
            # if s12.length>0 and s12.length>=alpha:
            #    ts1.append(s12)
            # if s21.length>0 and s21.length>=alpha:
            #    ts2.append(s21)
            # if s22.length>0 and s21.length>=alpha:
            #    ts2.append(s22)
            bks1 = False
            bks2 = False
    return (ts1, ts2, bks1, bks2)
Example #13
def process_junctions_and_line_map(h_start,
                                   w_start,
                                   H,
                                   W,
                                   H_scale,
                                   W_scale,
                                   junctions,
                                   line_map,
                                   mode="zoom-in"):
    if mode == "zoom-in":
        junctions[:, 0] = junctions[:, 0] * H_scale / H
        junctions[:, 1] = junctions[:, 1] * W_scale / W
        line_segments = homoaug.convert_to_line_segments(junctions, line_map)
        # Crop segments to the new boundaries
        line_segments_new = np.zeros([0, 4])
        image_poly = sg.Polygon([[w_start, h_start], [w_start + W, h_start],
                                 [w_start + W, h_start + H],
                                 [w_start, h_start + H]])
        for idx in range(line_segments.shape[0]):
            # Get the line segment
            seg_raw = line_segments[idx, :]  # in HW format.
            # Convert to shapely line (flip to xy format)
            seg = sg.LineString([np.flip(seg_raw[:2]), np.flip(seg_raw[2:])])
            # The line segment lies entirely inside the image.
            if seg.intersection(image_poly) == seg:
                line_segments_new = np.concatenate(
                    (line_segments_new, seg_raw[None, ...]), axis=0)
            # Intersect with the image.
            elif seg.intersects(image_poly):
                # Check intersection
                try:
                    p = np.array(seg.intersection(image_poly).coords).reshape(
                        [-1, 4])
                # If they intersect at exactly one point, just continue.
                except:
                    continue
                segment = np.concatenate(
                    [np.flip(p[0, :2]),
                     np.flip(p[0, 2:], axis=0)])[None, ...]
                line_segments_new = np.concatenate(
                    (line_segments_new, segment), axis=0)
            else:
                continue
        line_segments_new = (np.round(line_segments_new)).astype(int)
        # Filter segments with 0 length
        segment_lens = np.linalg.norm(line_segments_new[:, :2] -
                                      line_segments_new[:, 2:],
                                      axis=-1)
        seg_mask = segment_lens != 0
        line_segments_new = line_segments_new[seg_mask, :]
        # Convert back to junctions and line_map
        junctions_new = np.concatenate(
            (line_segments_new[:, :2], line_segments_new[:, 2:]), axis=0)
        if junctions_new.shape[0] == 0:
            junctions_new = np.zeros([0, 2])
            line_map = np.zeros([0, 0])
        else:
            junctions_new = np.unique(junctions_new, axis=0)
            # Generate line map from points and segments
            line_map = get_line_map(junctions_new,
                                    line_segments_new).astype(int)
        junctions_new[:, 0] -= h_start
        junctions_new[:, 1] -= w_start
        junctions = junctions_new
    elif mode == "zoom-out":
        # Process the junctions
        junctions[:, 0] = (junctions[:, 0] * H_scale / H) + h_start
        junctions[:, 1] = (junctions[:, 1] * W_scale / W) + w_start
    else:
        raise ValueError("[Error] unknown mode...")

    return junctions, line_map
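
The cropping step above boils down to intersecting each segment with the image rectangle. A minimal sketch of that shapely call with made-up coordinates (in xy order, unlike the HW-format segments handled in the function):

import shapely.geometry as sg

image_poly = sg.Polygon([[0, 0], [100, 0], [100, 100], [0, 100]])
seg = sg.LineString([(-20, 50), (50, 50)])  # starts outside the image

clipped = seg.intersection(image_poly)
print(list(clipped.coords))  # [(0.0, 50.0), (50.0, 50.0)]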
Example #14
 def test_linestring(self):
     a = numpy.array([[1.0, 1.0, 2.0, 2.0, 1.0], [3.0, 4.0, 4.0, 3.0, 3.0]])
     t = a.T
     s = geometry.LineString(t)
     self.assertEqual(list(s.coords), [(1.0, 3.0), (1.0, 4.0), (2.0, 4.0),
                                       (2.0, 3.0), (1.0, 3.0)])
Example #15
 def boundary(self):
     x0, x1, y0, y1 = self.bounds
     return sgeom.LineString([(x0, y0), (x0, y1), (x1, y1), (x1, y0),
                              (x0, y0)])
Example #16
def find(start, end, last_dis, last_path):
    # global st.obs
    # global st.global_min
    # global st.global_path
    # global st.global_sub_min
    # global st.global_sub_path
    # print('start',start,'\n end',end)
    line = ge.LineString([start, end])
    dis = float('inf')
    first_ob = None
    global keep
    for ob in st.obs:
        if line.crosses(ob):
            dis_ = start.distance(ob)
            if dis_ < dis and (not end.intersects(ob)):
                first_ob = ob
                dis = dis_
    # print('\nfirst_ob',first_ob)
    if not first_ob:
        return start.distance(end), [line]
    else:
        # dis+=last_dis
        new_convex = pan.GeoSeries(
            ge.Polygon(
                list(first_ob.exterior.coords[:]) +
                [(start.x, start.y), (end.x, end.y)])).convex_hull
        new_starts_ = list(new_convex.exterior.intersection(
            first_ob.exterior))[0]
        # print('new_starts_',new_starts_)
        # print('start',start)
        # print('new_convex',new_convex)
        # print('first_ob',first_ob)
        new_starts = []
        for points in list(new_starts_):
            # for coord in list(points.coords):
            # new_starts.apst.pend(ge.Point(coord))
            new_starts.extend(
                [ge.Point(coord) for coord in list(points.coords)])
            # new_starts.apst.pend(ge.Point(list(points.coords)[-1]))
        # print('pre',new_starts,'\n\n\n')
        # print('new_starts',new_starts)
        next_path_real = None
        for new_start in new_starts:
            new_path = ge.LineString([start, new_start])
            st.global_sub_min = start.distance(new_start)
            st.global_sub_path = [new_path]
            for ob in st.obs:
                if new_path.crosses(ob):
                    dis_new = start.distance(ob)
                    if dis_new != dis:
                        st.global_sub_min = float('inf')
                        st.global_sub_path = []
                        # print(start,new_start)
                        find_subpath(start, new_start, 0, [], dis)
                        break
            # print(st.global_sub_path,st.global_sub_min)
            dis_pre = last_dis + st.global_sub_min
            path_pre = last_path + st.global_sub_path
            if dis_pre + new_start.distance(
                    end
            ) < st.global_min - 0.03:  #can add an offset someproblem in this prun,should put in more argument
                # print(new_start,end)
                keep = [new_start, end]
                # print(end)
                next_dis, next_path = find(
                    new_start, end, dis_pre, path_pre
                )  #error! should check (start new_start) cross st.obstacle
                if dis_pre + next_dis < st.global_min:
                    st.global_min = dis_pre + next_dis
                    st.global_path = path_pre + next_path
                    next_path_real = next_path
                    next_start = new_start
        if not next_path_real:
            return float('inf'), []
        else:
            return st.global_min - last_dis, [
                ge.LineString([start, next_start])
            ] + next_path_real
Example #17
def simplify_path(points, tolerance):
    if len(points) < 2:
        return points
    line = geometry.LineString(points)
    line = line.simplify(tolerance, preserve_topology=False)
    return list(line.coords)
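
A minimal usage sketch for the helper above, with made-up coordinates (it assumes `from shapely import geometry` as in the surrounding code; `simplify` with `preserve_topology=False` applies Douglas-Peucker simplification):

points = [(0, 0), (1, 0.1), (2, -0.1), (3, 0), (4, 5)]
print(simplify_path(points, tolerance=0.5))
# near-collinear intermediate points are dropped:
# [(0.0, 0.0), (3.0, 0.0), (4.0, 5.0)]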
Example #18
def trip_to_geojson(feed: "Feed",
                    trip_id: str,
                    *,
                    include_stops: bool = False) -> Dict:
    """
    Return a GeoJSON representation of the given trip, optionally with
    its stops.

    Parameters
    ----------
    feed : Feed
    trip_id : string
        ID of trip in ``feed.trips``
    include_stops : boolean

    Returns
    -------
    dictionary
        A (decoded) GeoJSON FeatureCollection comprising a Linestring
        feature representing the trip's shape.
        If ``include_stops``, then also include one Point feature for
        each stop  visited by the trip.
        The Linestring feature will contain as properties all the
        columns in ``feed.trips`` pertaining to the given trip,
        and each Point feature will contain as properties all the
        columns in ``feed.stops`` pertaining to the stop,
        except the ``stop_lat`` and ``stop_lon`` properties.

        Return the empty dictionary if the trip has no shape.

    """
    # Get the relevant shapes
    t = feed.trips.copy()
    t = t[t["trip_id"] == trip_id].copy()
    shid = t["shape_id"].iat[0]
    geometry_by_shape = feed.build_geometry_by_shape(use_utm=False,
                                                     shape_ids=[shid])

    if not geometry_by_shape:
        return {}

    features = [{
        "type": "Feature",
        "properties": json.loads(t.to_json(orient="records"))[0],
        "geometry": sg.mapping(sg.LineString(geometry_by_shape[shid])),
    }]

    if include_stops:
        # Get relevant stops and geometries
        s = feed.get_stops(trip_id=trip_id)
        cols = set(s.columns) - set(["stop_lon", "stop_lat"])
        s = s[list(cols)].copy()
        stop_ids = s["stop_id"].tolist()
        geometry_by_stop = feed.build_geometry_by_stop(stop_ids=stop_ids)
        features.extend([{
            "type":
            "Feature",
            "properties":
            json.loads(
                s[s["stop_id"] == stop_id].to_json(orient="records"))[0],
            "geometry":
            sg.mapping(geometry_by_stop[stop_id]),
        } for stop_id in stop_ids])

    return {"type": "FeatureCollection", "features": features}
Example #19
tt2 = t2_on_ds.sel(lat=slice(lats[0], lats[1])).mean(dim='lat')

f=plt.figure(figsize=(11,4))
ax = f.add_subplot(111)
tt2=tt2.values-273.15
tt2[-15:-1]-=2
#tt2[-19:-16]-=1
tt2[-15:-10]+=1
dsp[-20:-12]*=1.2
# plt.plot(ds18_present.lon, (dsp-dsp.min())/(dsp.max()-dsp.min()), color='darkblue', label=ds18_hist.name, marker='o')
# ax.plot(ds18_present.lon, (dsp2-dsp2.min())/(dsp2.max()-dsp2.min()), color='orangered', label=ds18_present.name, marker='o')
ax.plot(ds18_present.lon, dsp, color='k', label=ds18_hist.name, marker='o')
ax.plot(ds18_present.lon, dsp2, color='b', label=ds18_present.name, marker='x')
# ax.plot(ds18_present.lon, (tt-tt.min())/(tt.max()-tt.min()), color='red', label='Temp2007')
# ax.plot(ds18_present.lon, (tt2-tt2.min())/(tt2.max()-tt2.min()), color='orange', label='Temp2011')
geom = shpg.LineString(((coord[0], coord[4]), (coord[1], coord[4])))

geom = shpg.LineString(((coord[0], coord[5]), (coord[1], coord[5])))
ax1 = ax.twinx()
ax1.plot(ds18_present.lon, tt-273.15, color='k', label='Temp2007', linestyle='dotted')
ax1.plot(ds18_present.lon, tt2, color='b', label='Temp2011', linestyle='dotted')
ax.plot(ds18_present.lon,  (temp)/100, color='grey', label='Deforestation')

rpatch = patches.Patch(color='seagreen', label='Forest fraction 2001 (%)')
rpatch2 = patches.Patch(color='grey', label='Deforestation (%)')
topoline = lines.Line2D([],[], color='b', label='LST deforested', linestyle='dotted')
t2 = lines.Line2D([],[], color='k', label='LST forested', linestyle='dotted')

night = lines.Line2D([],[], color='b', label='2011-2015', linestyle='-', marker='x', markersize=5)
day = lines.Line2D([],[], color='k', label='2005-2009', linestyle='--', marker='o', markersize=5)
Beispiel #20
0
rel(188022);
out body;
>;
out skel qt; """
api = overpy.Overpass()
result = api.query(query)

lss = []  # convert ways to LineStrings

for ii_w,way in enumerate(result.ways):
    ls_coords = []

    for node in way.nodes:
        ls_coords.append((node.lon, node.lat))  # create a list of node coordinates

    lss.append(geometry.LineString(ls_coords)) # create a LineString from coords


merged = linemerge([*lss]) # merge LineStrings
borders = unary_union(merged) # linestrings to a MultiLineString
polygons = list(polygonize(borders))
philly = geometry.MultiPolygon(polygons)

philly.contains(geometry.Point(-147.7798220, 64.8564400))
philly


df_crash = pd.read_csv('./RawData/PHILADELPHIA_1999/CRASH_1999_Philadelphia.csv')
for i in range(18):
    s = str(i)
    if i < 10:
Beispiel #21
0
        >;
        out skel qt; """.format(id)
        result = api.query(query)

        # Convert ways to LineStrings
        lss = []

        for ii_w, way in enumerate(result.ways):
            ls_coords = []

            for node in way.nodes:
                # create a list of node coordinates
                ls_coords.append((node.lon, node.lat))

            # create a LineString from coords
            lss.append(geometry.LineString(ls_coords))

        merged = linemerge([*lss])  # merge LineStrings
        borders = unary_union(merged)  # linestrings to a MultiLineString
        polygons = list(polygonize(borders))
        shapes[area] = geometry.MultiPolygon(polygons)
        dump(shapes[area], open("{:s}.wkb".format(area), "wb"))
        # FIXME: What is the canonical location for cached program supplement
        # data? Should I download it at install-time, when I'm copied somewhere
        # with write access that might never come back?


def contains(shape, coordinates):
    return shape.contains(geometry.Point(*coordinates))
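
A hedged usage sketch for the caching and containment helpers above (assumptions: the ``.wkb`` file was written by the ``dump`` call shown earlier via ``shapely.wkb``, and the file name below is invented for illustration):

from shapely.wkb import load

# load a boundary previously cached with shapely.wkb.dump (file name assumed)
with open("philadelphia.wkb", "rb") as fp:
    shape = load(fp)

# coordinates are (lon, lat), matching the (node.lon, node.lat) order used
# when the polygons were built
print(contains(shape, (-75.1652, 39.9526)))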

Beispiel #22
0
def serialize(records: Sequence[ocr_record],
              image_name: str = None,
              image_size: Tuple[int, int] = (0, 0),
              writing_mode: str = 'horizontal-tb',
              scripts: Optional[Iterable[str]] = None,
              regions: Optional[Dict[str, List[List[Tuple[int, int]]]]] = None,
              template: str = 'hocr') -> str:
    """
    Serializes a list of ocr_records into an output document.

    Serializes a list of predictions and their corresponding positions by
    doing some hOCR-specific preprocessing and then rendering them through
    one of several jinja2 templates.

    Note: Empty records are ignored for serialization purposes.

    Args:
        records (iterable): List of kraken.rpred.ocr_record
        image_name (str): Name of the source image
        image_size (tuple): Dimensions of the source image
        writing_mode (str): Sets the principal layout of lines and the
                            direction in which blocks progress. Valid values
                            are horizontal-tb, vertical-rl, and
                            vertical-lr.
        scripts (list): List of scripts contained in the OCR records
        regions (dict): Dictionary mapping region types to a list of region
                        polygons.
        template (str): Selector for the serialization format. May be
                        'hocr' or 'alto'.

    Returns:
        The rendered template as a string.
    """
    logger.info(
        f'Serialize {len(records)} records from {image_name} with template {template}.'
    )
    page = {
        'entities': [],
        'size': image_size,
        'name': image_name,
        'writing_mode': writing_mode,
        'scripts': scripts,
        'date': datetime.datetime.now(datetime.timezone.utc).isoformat(),
        'base_dir':
        [rec.base_dir for rec in records][0] if len(records) else None
    }  # type: dict
    seg_idx = 0
    char_idx = 0
    region_map = {}
    idx = 0
    if regions is not None:
        for id, regs in regions.items():
            for reg in regs:
                region_map[idx] = (id, geom.Polygon(reg), reg)
                idx += 1

    # build region and line type dict
    page['types'] = list(
        set(line.tags.values() for line in records if line.tags is not None))
    if regions is not None:
        page['types'].extend(list(regions.keys()))

    is_in_reg = -1
    for idx, record in enumerate(records):
        if record.type == 'baselines':
            l_obj = geom.LineString(record.baseline)
        else:
            l_obj = geom.LineString(record.line)
        reg = list(
            filter(lambda x: is_in_region(l_obj, x[1][1]), region_map.items()))
        if len(reg) == 0:
            cur_ent = page['entities']
        elif reg[0][0] != is_in_reg:
            reg = reg[0]
            is_in_reg = reg[0]
            region = {
                'index': reg[0],
                'bbox': [int(x) for x in reg[1][1].bounds],
                'boundary': [list(x) for x in reg[1][2]],
                'region_type': reg[1][0],
                'lines': [],
                'type': 'region'
            }
            page['entities'].append(region)
            cur_ent = region['lines']

        # set field to indicate the availability of baseline segmentation in
        # addition to bounding boxes
        if record.type == 'baselines':
            page['seg_type'] = 'baselines'
        line = {
            'index': idx,
            'bbox': max_bbox([record.line]),
            'cuts': record.cuts,
            'confidences': record.confidences,
            'recognition': [],
            'boundary': [list(x) for x in record.line],
            'type': 'line'
        }
        if record.tags is not None:
            line['tags'] = record.tags
        if record.type == 'baselines':
            line['baseline'] = [list(x) for x in record.baseline]
        splits = regex.split(r'(\s+)', record.prediction)
        line_offset = 0
        logger.debug(f'Record contains {len(splits)} segments')
        for segment in splits:
            if len(segment) == 0:
                continue
            seg_bbox = max_bbox(record.cuts[line_offset:line_offset +
                                            len(segment)])
            seg_struct = {
                'bbox':
                seg_bbox,
                'confidences':
                record.confidences[line_offset:line_offset + len(segment)],
                'cuts':
                record.cuts[line_offset:line_offset + len(segment)],
                'text':
                segment,
                'recognition': [{
                    'bbox': max_bbox([cut]),
                    'boundary': cut,
                    'confidence': conf,
                    'text': char,
                    'index': cid
                } for conf, cut, char, cid in zip(
                    record.confidences[line_offset:line_offset + len(segment)],
                    record.cuts[line_offset:line_offset + len(segment)],
                    segment, range(char_idx, char_idx + len(segment)))],
                'index':
                seg_idx
            }
            # compute convex hull of all characters in segment
            if record.type == 'baselines':
                pols = []
                for x in record.cuts[line_offset:line_offset + len(segment)]:
                    try:
                        pol = geom.Polygon(x)
                    except ValueError:
                        pol = geom.LineString(x).buffer(0.5, cap_style=2)
                    if pol.area == 0.0:
                        pol = pol.buffer(0.5)
                    # if area is still 0 it's probably a point
                    if pol.area == 0.0:
                        pol = geom.Point(x[0]).buffer(0.5)
                    pols.append(pol)
                pols = unary_union(pols)
                coords = np.array(pols.convex_hull.exterior.coords,
                                  dtype=np.uint).tolist()
                seg_struct['boundary'] = coords
            line['recognition'].append(seg_struct)
            char_idx += len(segment)
            seg_idx += 1
            line_offset += len(segment)
        cur_ent.append(line)

    # No records but there are regions -> serialize all regions
    if not records and regions:
        logger.debug(
            f'No lines given but {len(region_map)} regions. Serializing all regions.')
        for reg in region_map.items():
            region = {
                'index': reg[0],
                'bbox': [int(x) for x in reg[1][1].bounds],
                'boundary': [list(x) for x in reg[1][2]],
                'region_type': reg[1][0],
                'lines': [],
                'type': 'region'
            }
            page['entities'].append(region)

    logger.debug('Initializing jinja environment.')
    env = Environment(loader=PackageLoader('kraken', 'templates'),
                      trim_blocks=True,
                      lstrip_blocks=False,
                      autoescape=True)
    env.tests['whitespace'] = str.isspace
    env.filters['rescale'] = _rescale
    logger.debug('Retrieving template.')
    tmpl = env.get_template(template)
    logger.debug('Rendering data.')
    return tmpl.render(page=page)
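
A short, hedged example of calling ``serialize`` (assumption: ``records`` is an iterable of ``kraken.rpred.ocr_record`` produced by a recognition step that is not part of this snippet; the file names are invented):

# `records` is assumed to come from a prior recognition step (not shown)
alto_doc = serialize(records,
                     image_name='page_0001.png',
                     image_size=(2480, 3508),
                     writing_mode='horizontal-tb',
                     template='alto')

with open('page_0001.xml', 'w', encoding='utf-8') as fp:
    fp.write(alto_doc)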
Beispiel #23
0
def _calc_roi(line, bounds, baselines, suppl_obj, p_dir):
    # interpolate baseline
    ls = geom.LineString(line)
    ip_line = [line[0]]
    dist = 10
    while dist < ls.length:
        ip_line.append(np.array(ls.interpolate(dist)))
        dist += 10
    ip_line.append(line[-1])
    ip_line = np.array(ip_line)
    upper_bounds_intersects = []
    bottom_bounds_intersects = []
    for point in ip_line:
        upper_bounds_intersects.append(
            _ray_intersect_boundaries(point, (p_dir * (-1, 1))[::-1],
                                      bounds + 1).astype('int'))
        bottom_bounds_intersects.append(
            _ray_intersect_boundaries(point, (p_dir * (1, -1))[::-1],
                                      bounds + 1).astype('int'))
    # build polygon between baseline and bbox intersects
    upper_polygon = geom.Polygon(ip_line.tolist() + upper_bounds_intersects)
    bottom_polygon = geom.Polygon(ip_line.tolist() + bottom_bounds_intersects)

    # select baselines at least partially in each polygon
    side_a = [geom.LineString(upper_bounds_intersects)]
    side_b = [geom.LineString(bottom_bounds_intersects)]

    for adj_line in baselines + suppl_obj:
        adj_line = geom.LineString(adj_line)
        if upper_polygon.intersects(adj_line):
            side_a.append(adj_line)
        elif bottom_polygon.intersects(adj_line):
            side_b.append(adj_line)
    side_a = unary_union(side_a).buffer(1).boundary
    side_b = unary_union(side_b).buffer(1).boundary

    def _find_closest_point(pt, intersects):
        spt = geom.Point(pt)
        if intersects.type == 'MultiPoint':
            return min([p for p in intersects.geoms],
                       key=lambda x: spt.distance(x))
        elif intersects.type == 'Point':
            return intersects
        elif intersects.type == 'GeometryCollection' and len(intersects) > 0:
            t = min([p for p in intersects], key=lambda x: spt.distance(x))
            if t == 'Point':
                return t
            else:
                return nearest_points(spt, t)[1]
        else:
            raise Exception(
                f'No intersection with boundaries. Shapely intersection object: {intersects.wkt}'
            )

    env_up = []
    env_bottom = []
    # find orthogonal (to linear regression) intersects with adjacent objects to complete roi
    for point, upper_bounds_intersect, bottom_bounds_intersect in zip(
            ip_line, upper_bounds_intersects, bottom_bounds_intersects):
        upper_limit = _find_closest_point(
            point,
            geom.LineString([point,
                             upper_bounds_intersect]).intersection(side_a))
        bottom_limit = _find_closest_point(
            point,
            geom.LineString([point,
                             bottom_bounds_intersect]).intersection(side_b))
        env_up.append(upper_limit.coords[0])
        env_bottom.append(bottom_limit.coords[0])
    env_up = np.array(env_up, dtype='uint')
    env_bottom = np.array(env_bottom, dtype='uint')
    return env_up, env_bottom
Beispiel #24
0
for rtid in ROUTES_ACTIVE.keys():

    pids = ROUTES_PIDS[rtid]
    patternMainJSON = tApi.getPattern(pids[0])
    patternMain = [Stop(pt['lon'], pt['lat']) for pt in patternMainJSON['pt']]
    patternMain = resampleStops(patternMain, 100)

    routePatterns[rtid + '-0'] = patternMain
    if len(pids) > 1:
        patternSecondaryJSON = tApi.getPattern(pids[1])
        patternSecondary = [
            Stop(pt['lon'], pt['lat']) for pt in patternSecondaryJSON['pt']
        ]

        line = geom.LineString(patternMain)
        patternSecondaryFilter = []
        numBranches = 0
        for point in patternSecondary:
            geompt = geom.Point(point.lon, point.lat)
            unitlessDist = line.distance(geompt)
            dist = coordDist(unitlessDist)
            if dist > 100:
                patternSecondaryFilter.append(point)
        routePatterns[rtid + '-1'] = patternSecondaryFilter

temp61s = routePatterns['61s-1']
routePatterns['61s-1'] = filterStopsBoundingBox(temp61s, -80, -79.96, 40.43,
                                                40.445)
routePatterns['61s-2'] = filterStopsBoundingBox(temp61s, -79.94, -79.90, 40.41,
                                                40.44)
Beispiel #25
0
def polygonal_reading_order(
    lines: Sequence[Tuple[List[Tuple[int, int]], List[Tuple[int, int]]]],
    text_direction: str = 'lr',
    regions: Optional[Sequence[List[Tuple[int, int]]]] = None
) -> Sequence[Tuple[List[Tuple[int, int]], List[Tuple[int, int]]]]:
    """
    Given a list of baselines and regions, calculates the correct reading order
    and applies it to the input.

    Args:
        lines (Sequence): List of tuples containing the baseline and its
                          polygonization.
        text_direction (str): Set principal text direction for column
                              ordering. Can be 'lr' or 'rl'.
        regions (Sequence): List of region polygons.

    Returns:
        A reordered input.
    """
    bounds = []
    if regions is not None:
        r = [geom.Polygon(reg) for reg in regions]
    else:
        r = []
    region_lines = [[] for _ in range(len(r))]
    indizes = {}
    for line_idx, line in enumerate(lines):
        s_line = geom.LineString(line[1])
        in_region = False
        for idx, reg in enumerate(r):
            if is_in_region(s_line, reg):
                region_lines[idx].append(
                    (line_idx, (slice(s_line.bounds[1], s_line.bounds[3]),
                                slice(s_line.bounds[0], s_line.bounds[2]))))
                in_region = True
                break
        if not in_region:
            bounds.append((slice(s_line.bounds[1], s_line.bounds[3]),
                           slice(s_line.bounds[0], s_line.bounds[2])))
            indizes[line_idx] = ('line', line)
    # order everything in regions
    intra_region_order = [[] for _ in range(len(r))]
    for idx, reg in enumerate(r):
        if len(region_lines[idx]) > 0:
            order = reading_order([x[1] for x in region_lines[idx]],
                                  text_direction)
            lsort = topsort(order)
            intra_region_order[idx] = [region_lines[idx][i][0] for i in lsort]
            reg = reg.bounds
            bounds.append((slice(reg[1], reg[3]), slice(reg[0], reg[2])))
            indizes[line_idx + idx + 1] = ('region', idx)
    # order unassigned lines and regions
    order = reading_order(bounds, text_direction)
    lsort = topsort(order)
    sidz = sorted(indizes.keys())
    lsort = [sidz[i] for i in lsort]
    ordered_lines = []
    for i in lsort:
        if indizes[i][0] == 'line':
            ordered_lines.append(indizes[i][1])
        else:
            ordered_lines.extend(lines[x]
                                 for x in intra_region_order[indizes[i][1]])
    return ordered_lines
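
An illustrative call with toy geometry (the coordinates below are invented; real inputs would come from a segmenter):

# two horizontal lines inside one region, given as (baseline, boundary) tuples
lines = [
    ([(10, 110), (200, 110)], [(10, 90), (200, 90), (200, 130), (10, 130)]),
    ([(10, 40), (200, 40)], [(10, 20), (200, 20), (200, 60), (10, 60)]),
]
regions = [[(0, 0), (220, 0), (220, 150), (0, 150)]]

ordered = polygonal_reading_order(lines, text_direction='lr', regions=regions)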
Beispiel #26
0
    def _draw_gridliner(self, nx=None, ny=None, renderer=None):
        """Create Artists for all visible elements and add to our Axes."""
        # Check status
        if self._plotted:
            return
        self._plotted = True

        # Inits
        lon_lim, lat_lim = self._axes_domain(nx=nx, ny=ny)

        transform = self._crs_transform()
        rc_params = matplotlib.rcParams
        n_steps = self.n_steps
        crs = self.crs

        # Get nice ticks within crs domain
        lon_ticks = self.xlocator.tick_values(lon_lim[0], lon_lim[1])
        lat_ticks = self.ylocator.tick_values(lat_lim[0], lat_lim[1])
        lon_ticks = [
            value for value in lon_ticks
            if value >= max(lon_lim[0], crs.x_limits[0])
            and value <= min(lon_lim[1], crs.x_limits[1])
        ]
        lat_ticks = [
            value for value in lat_ticks
            if value >= max(lat_lim[0], crs.y_limits[0])
            and value <= min(lat_lim[1], crs.y_limits[1])
        ]

        #####################
        # Gridlines drawing #
        #####################

        collection_kwargs = self.collection_kwargs
        if collection_kwargs is None:
            collection_kwargs = {}
        collection_kwargs = collection_kwargs.copy()
        collection_kwargs['transform'] = transform
        # XXX doesn't gracefully handle lw vs linewidth aliases...
        collection_kwargs.setdefault('color', rc_params['grid.color'])
        collection_kwargs.setdefault('linestyle', rc_params['grid.linestyle'])
        collection_kwargs.setdefault('linewidth', rc_params['grid.linewidth'])

        # Meridians
        lat_min, lat_max = lat_lim
        if lat_ticks:
            lat_min = min(lat_min, min(lat_ticks))
            lat_max = max(lat_max, max(lat_ticks))
        lon_lines = np.empty((len(lon_ticks), n_steps, 2))
        lon_lines[:, :, 0] = np.array(lon_ticks)[:, np.newaxis]
        lon_lines[:, :, 1] = np.linspace(lat_min, lat_max,
                                         n_steps)[np.newaxis, :]

        if self.xlines:
            nx = len(lon_lines) + 1
            # XXX this bit is cartopy specific. (for circular longitudes)
            # Purpose: omit plotting the last x line,
            # as it may overlap the first.
            if (isinstance(crs, Projection)
                    and isinstance(crs, _RectangularProjection)
                    and abs(np.diff(lon_lim)) == abs(np.diff(crs.x_limits))):
                nx -= 1
            lon_lc = mcollections.LineCollection(lon_lines,
                                                 **collection_kwargs)
            self.xline_artists.append(lon_lc)
            self.axes.add_collection(lon_lc, autolim=False)

        # Parallels
        lon_min, lon_max = lon_lim
        if lon_ticks:
            lon_min = min(lon_min, min(lon_ticks))
            lon_max = max(lon_max, max(lon_ticks))
        lat_lines = np.empty((len(lat_ticks), n_steps, 2))
        lat_lines[:, :, 0] = np.linspace(lon_min, lon_max,
                                         n_steps)[np.newaxis, :]
        lat_lines[:, :, 1] = np.array(lat_ticks)[:, np.newaxis]
        if self.ylines:
            lat_lc = mcollections.LineCollection(lat_lines,
                                                 **collection_kwargs)
            self.yline_artists.append(lat_lc)
            self.axes.add_collection(lat_lc, autolim=False)

        #################
        # Label drawing #
        #################

        self.bottom_label_artists = []
        self.top_label_artists = []
        self.left_label_artists = []
        self.right_label_artists = []
        if not (self.left_labels or self.right_labels or self.bottom_labels
                or self.top_labels):
            return
        self._assert_can_draw_ticks()

        # Get the real map boundaries
        map_boundary_vertices = self.axes.patch.get_path().vertices
        map_boundary = sgeom.Polygon(map_boundary_vertices)

        self._labels = []

        if self.x_inline:
            y_midpoints = self._find_midpoints(lat_lim, lat_ticks)
        if self.y_inline:
            x_midpoints = self._find_midpoints(lon_lim, lon_ticks)

        for lonlat, lines, line_ticks, formatter, label_style in (
            ('lon', lon_lines, lon_ticks, self.xformatter, self.xlabel_style),
            ('lat', lat_lines, lat_ticks, self.yformatter, self.ylabel_style)):

            formatter.set_locs(line_ticks)

            for line, tick_value in zip(lines, line_ticks):
                # Intersection of line with map boundary
                line = self.axes.projection.transform_points(
                    crs, line[:, 0], line[:, 1])[:, :2]
                infs = np.isinf(line).any(axis=1)
                line = line.compress(~infs, axis=0)
                if line.size == 0:
                    continue
                line = sgeom.LineString(line)
                if line.intersects(map_boundary):
                    intersection = line.intersection(map_boundary)
                    del line
                    if intersection.is_empty:
                        continue
                    if isinstance(intersection, sgeom.MultiPoint):
                        if len(intersection) < 2:
                            continue
                        tails = [[(pt.x, pt.y) for pt in intersection[:2]]]
                        heads = [[(pt.x, pt.y)
                                  for pt in intersection[-1:-3:-1]]]
                    elif isinstance(intersection,
                                    (sgeom.LineString, sgeom.MultiLineString)):
                        if isinstance(intersection, sgeom.LineString):
                            intersection = [intersection]
                        elif len(intersection) > 4:
                            # Gridline and map boundary are parallel
                            # and they intersect themselves too much
                            # it results in a multiline string
                            # that must be converted to a single linestring.
                            # This is an empirical workaround for a problem
                            # that can probably be solved in a cleaner way.
                            xy = np.append(intersection[0],
                                           intersection[-1],
                                           axis=0)
                            intersection = [sgeom.LineString(xy)]
                        tails = []
                        heads = []
                        for inter in intersection:
                            if len(inter.coords) < 2:
                                continue
                            tails.append(inter.coords[:2])
                            heads.append(inter.coords[-1:-3:-1])
                        if not tails:
                            continue
                    elif isinstance(intersection,
                                    sgeom.collection.GeometryCollection):
                        # This is a collection of Point and LineString that
                        # represent the same gridline.
                        # We only consider the first geometries, merge their
                        # coordinates and keep first two points to get only one
                        # tail ...
                        xy = []
                        for geom in intersection.geoms:
                            for coord in geom.coords:
                                xy.append(coord)
                                if len(xy) == 2:
                                    break
                            if len(xy) == 2:
                                break
                        tails = [xy]
                        # ... and the last geometries, merge their coordinates
                        # and keep last two points to get only one head.
                        xy = []
                        for geom in reversed(intersection.geoms):
                            for coord in reversed(geom.coords):
                                xy.append(coord)
                                if len(xy) == 2:
                                    break
                            if len(xy) == 2:
                                break
                        heads = [xy]
                    else:
                        warnings.warn(
                            'Unsupported intersection geometry for gridline '
                            'labels: ' + intersection.__class__.__name__)
                        continue
                    del intersection

                    # Loop on head and tail and plot label by extrapolation
                    for tail, head in zip(tails, heads):
                        for i, (pt0, pt1) in enumerate([tail, head]):
                            kw, angle, loc = self._segment_to_text_specs(
                                pt0, pt1, lonlat)
                            if not getattr(self, loc + '_labels'):
                                continue
                            kw.update(label_style,
                                      bbox={
                                          'pad': 0,
                                          'visible': False
                                      })
                            text = formatter(tick_value)

                            if self.y_inline and lonlat == 'lat':
                                # 180 degrees isn't formatted with a
                                # suffix and adds confusion if it's inline
                                if abs(tick_value) == 180:
                                    continue
                                x = x_midpoints[i]
                                y = tick_value
                                kw.update(clip_on=True)
                                y_set = True
                            else:
                                x = pt0[0]
                                y_set = False

                            if self.x_inline and lonlat == 'lon':
                                if abs(tick_value) == 180:
                                    continue
                                x = tick_value
                                y = y_midpoints[i]
                                kw.update(clip_on=True)
                            elif not y_set:
                                y = pt0[1]

                            tt = self.axes.text(x, y, text, **kw)
                            tt._angle = angle
                            priority = (((lonlat == 'lon')
                                         and loc in ('bottom', 'top'))
                                        or ((lonlat == 'lat')
                                            and loc in ('left', 'right')))
                            self._labels.append((lonlat, priority, tt))
                            getattr(self, loc + '_label_artists').append(tt)

        # Sort labels
        if self._labels:
            self._labels.sort(key=operator.itemgetter(0), reverse=True)
            self._update_labels_visibility(renderer)
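
For context, the private method above is driven at draw time by the public gridline API; a typical trigger looks like this sketch (standard cartopy usage, details may vary between versions):

import matplotlib.pyplot as plt
import cartopy.crs as ccrs

# create a GeoAxes and request labelled gridlines; drawing the figure
# eventually calls the Gridliner's _draw_gridliner shown above
ax = plt.axes(projection=ccrs.PlateCarree())
ax.coastlines()
gl = ax.gridlines(draw_labels=True, linestyle='--')
plt.savefig('gridlines.png')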
Beispiel #27
0
def extract_polygons(im: Image.Image, bounds: Dict[str, Any]) -> Image.Image:
    """
    Yields the subimages of the image ``im`` defined by the given bounding
    polygons (with baselines) or bounding boxes, preserving their order.

    Args:
        im: Input image
        bounds: A dictionary either in baseline format:
            ```
            {'type': 'baselines',
             'lines': [{'baseline': [[x_0, y_0], ... [x_n, y_n]],
                        'boundary': [[x_0, y_0], ... [x_n, y_n]]},
                       ....]
            }
            ```
            or bounding box format:
            ```
            {'boxes': [[x_0, y_0, x_1, y_1], ...],
             'text_direction': 'horizontal-lr'}
            ```

    Yields:
        The extracted subimage together with its corresponding line dict or
        bounding box.
    """
    if 'type' in bounds and bounds['type'] == 'baselines':
        # select proper interpolation order depending on image mode
        if im.mode == '1':
            order = 0
            im = im.convert('L')
        else:
            order = 1
        im = np.array(im)

        for line in bounds['lines']:
            if line['boundary'] is None:
                raise KrakenInputException('No boundary given for line')
            pl = np.array(line['boundary'])
            baseline = np.array(line['baseline'])
            c_min, c_max = int(pl[:, 0].min()), int(pl[:, 0].max())
            r_min, r_max = int(pl[:, 1].min()), int(pl[:, 1].max())

            if (pl < 0).any() or (pl.max(axis=0)[::-1] >= im.shape[:2]).any():
                raise KrakenInputException(
                    'Line polygon outside of image bounds')
            if (baseline < 0).any() or (baseline.max(axis=0)[::-1] >=
                                        im.shape[:2]).any():
                raise KrakenInputException('Baseline outside of image bounds')

            # fast path for straight baselines requiring only rotation
            if len(baseline) == 2:
                baseline = baseline.astype(float)
                # calculate direction vector
                lengths = np.linalg.norm(np.diff(baseline.T), axis=0)
                p_dir = np.mean(np.diff(baseline.T) * lengths / lengths.sum(),
                                axis=1)
                p_dir = (p_dir.T / np.sqrt(np.sum(p_dir**2, axis=-1)))
                angle = np.arctan2(p_dir[1], p_dir[0])
                patch = im[r_min:r_max + 1, c_min:c_max + 1].copy()
                offset_polygon = pl - (c_min, r_min)
                r, c = draw.polygon(offset_polygon[:, 1], offset_polygon[:, 0])
                mask = np.zeros(patch.shape[:2], dtype=bool)
                mask[r, c] = True
                patch[mask != True] = 0
                extrema = offset_polygon[(0, -1), :]
                # scale line image to max 600 pixel width
                tform, rotated_patch = _rotate(patch,
                                               angle,
                                               center=extrema[0],
                                               scale=1.0,
                                               cval=0)
                i = Image.fromarray(rotated_patch.astype('uint8'))
            # normal slow path with piecewise affine transformation
            else:
                if len(pl) > 50:
                    pl = approximate_polygon(pl, 2)
                full_polygon = subdivide_polygon(pl, preserve_ends=True)
                pl = geom.MultiPoint(full_polygon)

                bl = zip(baseline[:-1:], baseline[1::])
                bl = [geom.LineString(x) for x in bl]
                cum_lens = np.cumsum([0] + [line.length for line in bl])
                # distance of intercept from start point and number of line segment
                control_pts = []
                for point in pl.geoms:
                    npoint = np.array(point.coords)[0]
                    line_idx, dist, intercept = min(
                        ((idx, line.project(point),
                          np.array(
                              line.interpolate(line.project(point)).coords))
                         for idx, line in enumerate(bl)),
                        key=lambda x: np.linalg.norm(npoint - x[2]))
                    # absolute distance from start of line
                    line_dist = cum_lens[line_idx] + dist
                    intercept = np.array(intercept)
                    # side of line the point is at
                    side = np.linalg.det(
                        np.array([[
                            baseline[line_idx + 1][0] - baseline[line_idx][0],
                            npoint[0] - baseline[line_idx][0]
                        ],
                                  [
                                      baseline[line_idx + 1][1] -
                                      baseline[line_idx][1],
                                      npoint[1] - baseline[line_idx][1]
                                  ]]))
                    side = np.sign(side)
                    # signed perpendicular distance from the rectified distance
                    per_dist = side * np.linalg.norm(npoint - intercept)
                    control_pts.append((line_dist, per_dist))
                # calculate baseline destination points
                bl_dst_pts = baseline[0] + np.dstack(
                    (cum_lens, np.zeros_like(cum_lens)))[0]
                # calculate bounding polygon destination points
                pol_dst_pts = np.array([
                    baseline[0] + (line_dist, per_dist)
                    for line_dist, per_dist in control_pts
                ])
                # extract bounding box patch
                c_dst_min, c_dst_max = int(pol_dst_pts[:, 0].min()), int(
                    pol_dst_pts[:, 0].max())
                r_dst_min, r_dst_max = int(pol_dst_pts[:, 1].min()), int(
                    pol_dst_pts[:, 1].max())
                output_shape = np.around(
                    (r_dst_max - r_dst_min + 1, c_dst_max - c_dst_min + 1))
                patch = im[r_min:r_max + 1, c_min:c_max + 1].copy()
                # offset src points by patch shape
                offset_polygon = full_polygon - (c_min, r_min)
                offset_baseline = baseline - (c_min, r_min)
                # offset dst point by dst polygon shape
                offset_bl_dst_pts = bl_dst_pts - (c_dst_min, r_dst_min)
                offset_pol_dst_pts = pol_dst_pts - (c_dst_min, r_dst_min)
                # mask out points outside bounding polygon
                mask = np.zeros(patch.shape[:2], dtype=bool)
                r, c = draw.polygon(offset_polygon[:, 1], offset_polygon[:, 0])
                mask[r, c] = True
                patch[mask != True] = 0
                # estimate piecewise transform
                src_points = np.concatenate((offset_baseline, offset_polygon))
                dst_points = np.concatenate(
                    (offset_bl_dst_pts, offset_pol_dst_pts))
                tform = PiecewiseAffineTransform()
                tform.estimate(src_points, dst_points)
                o = warp(patch,
                         tform.inverse,
                         output_shape=output_shape,
                         preserve_range=True,
                         order=order)
                i = Image.fromarray(o.astype('uint8'))
            yield i.crop(i.getbbox()), line
    else:
        if bounds['text_direction'].startswith('vertical'):
            angle = 90
        else:
            angle = 0
        for box in bounds['boxes']:
            if isinstance(box, tuple):
                box = list(box)
            if (box < [0, 0, 0, 0] or box[::2] >= [im.size[0], im.size[0]]
                    or box[1::2] >= [im.size[1], im.size[1]]):
                logger.error('bbox {} is outside of image bounds {}'.format(
                    box, im.size))
                raise KrakenInputException('Line outside of image bounds')
            yield im.crop(box).rotate(angle, expand=True), box
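
A hedged usage sketch using the simpler bounding-box input format from the docstring (the image path and box coordinates are invented):

from PIL import Image

im = Image.open('page.png')  # assumed source image
bounds = {'boxes': [[100, 50, 900, 120], [100, 140, 900, 210]],
          'text_direction': 'horizontal-lr'}

# extract_polygons is a generator yielding (subimage, box) pairs
for idx, (line_img, box) in enumerate(extract_polygons(im, bounds)):
    line_img.save(f'line_{idx:03d}.png')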
Beispiel #28
0
def build_segmented_hinge(desired_width, desired_segment_length, desired_gap):
    hinge1 = build_segments(desired_width, desired_segment_length, desired_gap)
    hinge2 = [sg.LineString(item) for item in hinge1]
    hinge3 = Layer(*hinge2)
    return hinge3
Beispiel #29
0
def execute(context):
    output_path = context.config("output_path")
    output_prefix = context.config("output_prefix")

    # Prepare households
    df_households = context.stage("synthesis.population.enriched").rename(
        columns={
            "household_income": "income"
        }).drop_duplicates("household_id")

    df_households = df_households[[
        "household_id", "car_availability", "bike_availability", "income",
        "census_household_id"
    ]]

    df_households.to_csv("%s/%shouseholds.csv" % (output_path, output_prefix),
                         sep=";",
                         index=None)

    # Prepare persons
    df_persons = context.stage("synthesis.population.enriched").rename(
        columns={"has_license": "has_driving_license"})

    df_persons = df_persons[[
        "person_id", "household_id", "age", "employed", "sex",
        "socioprofessional_class", "has_driving_license",
        "has_pt_subscription", "census_person_id", "hts_id"
    ]]

    df_persons.to_csv("%s/%spersons.csv" % (output_path, output_prefix),
                      sep=";",
                      index=None)

    # Prepare activities
    df_activities = context.stage("synthesis.population.activities").rename(
        columns={"trip_index": "following_trip_index"})

    df_activities = pd.merge(df_activities,
                             df_persons[["person_id", "household_id"]],
                             on="person_id")

    df_activities["preceding_trip_index"] = df_activities[
        "following_trip_index"].shift(1)
    df_activities.loc[df_activities["is_first"], "preceding_trip_index"] = -1
    df_activities["preceding_trip_index"] = df_activities[
        "preceding_trip_index"].astype(int)

    df_activities = df_activities[[
        "person_id", "household_id", "activity_index", "preceding_trip_index",
        "following_trip_index", "purpose", "start_time", "end_time",
        "is_first", "is_last"
    ]]

    df_activities.to_csv("%s/%sactivities.csv" % (output_path, output_prefix),
                         sep=";",
                         index=None)

    # Prepare trips
    df_trips = context.stage("synthesis.population.trips").rename(
        columns={
            "is_first_trip": "is_first",
            "is_last_trip": "is_last"
        })

    df_trips["preceding_activity_index"] = df_trips["trip_index"]
    df_trips["following_activity_index"] = df_trips["trip_index"] + 1

    df_trips = df_trips[[
        "person_id", "trip_index", "preceding_activity_index",
        "following_activity_index", "departure_time", "arrival_time", "mode",
        "preceding_purpose", "following_purpose", "is_first", "is_last"
    ]]

    df_trips.to_csv("%s/%strips.csv" % (output_path, output_prefix),
                    sep=";",
                    index=None)

    if context.config("generate_vehicles_file"):
        # Prepare vehicles
        df_vehicle_types, df_vehicles = context.stage(
            "synthesis.vehicles.selected")

        df_vehicle_types.to_csv("%s/%svehicle_types.csv" %
                                (output_path, output_prefix),
                                sep=";",
                                index=None)
        df_vehicles.to_csv("%s/%svehicles.csv" % (output_path, output_prefix),
                           sep=";",
                           index=None)

    # Prepare spatial data sets
    df_locations = context.stage("synthesis.population.spatial.locations")[[
        "person_id", "activity_index", "geometry"
    ]]

    df_activities = pd.merge(
        df_activities,
        df_locations[["person_id", "activity_index", "geometry"]],
        how="left",
        on=["person_id", "activity_index"])

    # Write spatial activities
    df_spatial = gpd.GeoDataFrame(df_activities, crs="EPSG:2154")
    df_spatial["purpose"] = df_spatial["purpose"].astype(str)
    df_spatial.to_file("%s/%sactivities.gpkg" % (output_path, output_prefix),
                       driver="GPKG")

    # Write spatial homes
    df_spatial[df_spatial["purpose"] == "home"].drop_duplicates(
        "household_id")[["household_id", "geometry"]].to_file(
            "%s/%shomes.gpkg" % (output_path, output_prefix), driver="GPKG")

    # Write spatial commutes
    df_spatial = pd.merge(
        df_spatial[df_spatial["purpose"] == "home"].drop_duplicates(
            "person_id")[["person_id", "geometry"
                          ]].rename(columns={"geometry": "home_geometry"}),
        df_spatial[df_spatial["purpose"] == "work"].drop_duplicates(
            "person_id")[["person_id", "geometry"
                          ]].rename(columns={"geometry": "work_geometry"}))

    df_spatial["geometry"] = [
        geo.LineString(od)
        for od in zip(df_spatial["home_geometry"], df_spatial["work_geometry"])
    ]

    df_spatial = df_spatial.drop(columns=["home_geometry", "work_geometry"])
    df_spatial.to_file("%s/%scommutes.gpkg" % (output_path, output_prefix),
                       driver="GPKG")

    # Write spatial trips
    df_spatial = pd.merge(
        df_trips,
        df_locations[["person_id", "activity_index", "geometry"]].rename(
            columns={
                "activity_index": "preceding_activity_index",
                "geometry": "preceding_geometry"
            }),
        how="left",
        on=["person_id", "preceding_activity_index"])

    df_spatial = pd.merge(
        df_spatial,
        df_locations[["person_id", "activity_index", "geometry"]].rename(
            columns={
                "activity_index": "following_activity_index",
                "geometry": "following_geometry"
            }),
        how="left",
        on=["person_id", "following_activity_index"])

    df_spatial["geometry"] = [
        geo.LineString(od) for od in zip(df_spatial["preceding_geometry"],
                                         df_spatial["following_geometry"])
    ]

    df_spatial = df_spatial.drop(
        columns=["preceding_geometry", "following_geometry"])

    df_spatial = gpd.GeoDataFrame(df_spatial, crs="EPSG:2154")
    df_spatial["following_purpose"] = df_spatial["following_purpose"].astype(
        str)
    df_spatial["preceding_purpose"] = df_spatial["preceding_purpose"].astype(
        str)
    df_spatial["mode"] = df_spatial["mode"].astype(str)
    df_spatial.to_file("%s/%strips.gpkg" % (output_path, output_prefix),
                       driver="GPKG")
Beispiel #30
0
 def reversed(self):
     return RunningStitch(shgeo.LineString(reversed(self.path.coords)),
                          self.original_element)