Example #1
0
def elementary_volumes(holes_,
                       starts_,
                       ends_,
                       hole_ids_,
                       node_ids_,
                       nodes_,
                       end_ids_,
                       end_geoms_,
                       end_holes_,
                       srid_=32632,
                       end_node_relative_distance=0.3,
                       end_node_relative_thickness=.3):
    """Build the elementary 3D volumes spanned between three holes.

    Triangulates the faces between each pair of holes, closes the volume
    with top/bottom faces and termination caps, splits the result into
    connected components and yields, for each component, a 4-tuple
    ``(volume, face1, face2, face3)`` of WKB hex strings (an EWKT empty
    multipolygon placeholder is used for empty parts).

    Parameters
    ----------
    holes_ : sequence of the three hole ids defining the section
    starts_, ends_ : node ids of the edge endpoints
    hole_ids_ : hole id of each node in ``node_ids_``
    node_ids_, nodes_ : node ids and their hex-encoded WKB geometries
    end_ids_, end_geoms_, end_holes_ : per-node termination geometries
        (hex WKB) and the hole each belongs to
    srid_ : SRID stamped on every output geometry
    end_node_relative_distance : horizontal extent of generated end caps,
        relative to the incident edge lengths
    end_node_relative_thickness : vertical extent of generated end caps,
        relative to the node thickness
    """

    DEBUG = False  # set True to dump intermediate geometries under /tmp
    PRECI = 6  # rounding precision for projected 2D coordinates
    debug_files = []

    # decode input geometries
    nodes = {
        id_: wkb.loads(bytes.fromhex(geom))
        for id_, geom in zip(node_ids_, nodes_)
    }
    ends = defaultdict(list)
    end_holes = defaultdict(list)
    for id_, geom, hole_id in zip(end_ids_, end_geoms_, end_holes_):
        ends[id_].append(wkb.loads(bytes.fromhex(geom)))
        end_holes[id_].append(hole_id)
    holes = {n: h for n, h in zip(node_ids_, hole_ids_)}
    edges = [(s, e) for s, e in zip(starts_, ends_)]
    #assert(len(edges) == len(set(edges)))
    #assert(len(holes_) == 3)
    #assert(set(hole_ids_).intersection(set(holes_)) == set(hole_ids_))
    #assert(set(end_holes_).intersection(set(holes_)) == set(end_holes_))

    # translate everything close to origin to avoid numerical issues
    translation = None
    for id_ in nodes.keys():
        if translation is None:
            translation = nodes[id_].coords[0]
        nodes[id_] = translate(nodes[id_], -translation[0], -translation[1],
                               -translation[2])

    for id_ in ends.keys():
        for i in range(len(ends[id_])):
            ends[id_][i] = translate(ends[id_][i], -translation[0],
                                     -translation[1], -translation[2])

    graph = defaultdict(set)  # undirected (edge in both directions)
    for e in edges:
        graph[e[0]].add(e[1])
        graph[e[1]].add(e[0])

    # two connected edges form a ring
    # /!\ do not do that for complex trousers configuration, this will
    # connect things that should not be connected
    #
    # indeed it is stupid to do that here DO NOT TRY IT AGAIN
    #

    # every pair of edges with a common neighbor closes a triangle;
    # triangle vertices are ordered by their hole id
    triangles = set()
    triangle_edges = set()
    triangle_nodes = set()
    for e in edges:
        common_neigbors = graph[e[0]].intersection(graph[e[1]])
        for n in common_neigbors:
            tri = tuple((i for _, i in sorted(
                zip((holes[e[0]], holes[e[1]], holes[n]), (e[0], e[1], n)))))
            triangles.add(tri)
            triangle_edges.add(tri[0:2])
            triangle_edges.add(tri[1:3])
            triangle_edges.add(tri[0:1] + tri[2:3])
            triangle_nodes.update(tri)

    # compute face offset direction for termination corners
    # termination corners are nodes that are not part of a triangle
    # and that have at least 2 incident edges that are not in the same
    # face (i.e. different holes)
    unused_nodes = set(nodes.keys()).difference(triangle_nodes)
    # offsets maps a node coordinate to the coordinate of its end cap
    offsets = {
        nodes[n].coords[0]: ends[n][0].coords[0]
        for n, l in list(ends.items()) if len(l) == 1
    }
    offsets.update({
        nodes[n].coords[-1]: ends[n][0].coords[-1]
        for n, l in list(ends.items()) if len(l) == 1
    })
    for n in unused_nodes:
        p = pair_of_non_coplanar_neighbors(n, graph, holes)
        if p:
            A, B, C = array(nodes[n].coords[0][:2]), array(
                nodes[p[0]].coords[0][:2]), array(nodes[p[1]].coords[0][:2])
            c = average(array(nodes[n].coords), (0, ))
            # offset along the bisector of the two incident edges,
            # scaled by their mean length
            u = .5 * (normalized(B - A) +
                      normalized(C - A)) * end_node_relative_distance * .5 * (
                          norm(B - A) + norm(C - A))
            thickness = abs(nodes[n].coords[0][2] - nodes[n].coords[-1][2])
            end_node_thickness = end_node_relative_thickness * thickness
            offsets[nodes[n].coords[0]] = tuple(c +
                                                array((u[0], u[1], +.5 *
                                                       end_node_thickness)))
            offsets[nodes[n].coords[-1]] = tuple(c +
                                                 array((u[0], u[1], -.5 *
                                                        end_node_thickness)))

    if DEBUG:
        open('/tmp/offsets.vtk', 'w').write(
            to_vtk(
                MultiLineString([n for l in list(ends.values())
                                 for n in l]).wkb_hex))

    sorted_holes = sorted(holes_)
    # face origin is the lowest bottom of the node in the first hole
    # face normal is

    face_idx = -1
    lines = []
    faces = defaultdict(list)
    result = []
    termination = []
    # one face per pair of holes; other_hole is the remaining one
    for hl, hr, other_hole in ((sorted_holes[0], sorted_holes[1],
                                sorted_holes[2]),
                               (sorted_holes[1], sorted_holes[2],
                                sorted_holes[0]), (sorted_holes[0],
                                                   sorted_holes[2],
                                                   sorted_holes[1])):
        face_idx += 1
        # does (hl, hr) follow the winding order given by holes_?
        direct_orientation = (hl, hr) in ((holes_[0], holes_[1]),
                                          (holes_[1], holes_[2]),
                                          (holes_[2], holes_[0]))

        face_edges = list(
            set([(s, e) if holes[s] == hl else (e, s)
                 for s in list(graph.keys()) for e in graph[s]
                 if holes[s] in (hl, hr) and holes[e] in (hl, hr)]))

        if not len(face_edges):
            continue

        face_lines = []
        for e in face_edges:
            face_lines.append(
                Line([nodes[e[0]].coords[0], nodes[e[1]].coords[0]], Line.TOP))
            face_lines.append(
                Line([nodes[e[0]].coords[1], nodes[e[1]].coords[1]],
                     Line.BOTTOM))

        # split lines at their mutual intersections, propagating offsets
        # to the newly created points when both endpoints carry offsets
        for i, j in combinations(list(range(len(face_lines))), 2):
            assert (face_lines[i].side != Line.VERTICAL
                    and face_lines[j].side != Line.VERTICAL)
            p = sym_split(face_lines[i].points, face_lines[j].points)
            if p and p not in offsets:
                if face_lines[i].points[0] in offsets and face_lines[i].points[-1] in offsets\
                        and face_lines[j].points[0] in offsets and face_lines[j].points[-1] in offsets:
                    splt = sym_split(
                        offset_coords(offsets, [
                            face_lines[i].points[0], face_lines[i].points[-1]
                        ]),
                        offset_coords(offsets, [
                            face_lines[j].points[0], face_lines[j].points[-1]
                        ]))
                    offsets[p] = splt if splt else p
                else:
                    offsets[p] = p

        # split in middle
        for i in range(len(face_lines)):
            assert (face_lines[i].side != Line.VERTICAL)
            p = face_lines[i].midpoint_split()
            if p and p not in offsets:
                if face_lines[i].points[0] in offsets and face_lines[i].points[
                        -1] in offsets:
                    offsets[p] = tuple(
                        .5 * (array(offsets[face_lines[i].points[0]]) +
                              array(offsets[face_lines[i].points[-1]])))
                else:
                    offsets[p] = p

        for k, n in list(nodes.items()):
            if holes[k] in (hl, hr):
                face_lines.append(
                    Line([n.coords[0], n.coords[1]], Line.VERTICAL))

        # select the topmost edge (highest mean altitude of its two tops)
        top_altitude = -INF
        top_edge = None
        for s, e in face_edges:
            # mean of the z coordinates of both top points
            # (fixed: previously used coords[0][1], the y coordinate,
            # for the second node)
            alt = .5 * (nodes[s].coords[0][2] + nodes[e].coords[0][2])
            if alt > top_altitude:
                # fixed: top_altitude was never updated, so the last
                # edge always won regardless of altitude
                top_altitude = alt
                top_edge = LineString([nodes[s].coords[0], nodes[e].coords[0]])

        # build a 2D frame (v, z) in the vertical plane of the top edge
        origin = array(top_edge.coords[0])
        u = array(top_edge.coords[1]) - origin
        z = array((0, 0, 1))
        w = cross(z, u)
        w /= norm(w)
        v = cross(w, z)

        lines += face_lines

        linework = [
            array((s, e)) for l in face_lines
            for s, e in zip(l.points[:-1], l.points[1:])
        ]
        linework_sav = linework

        if DEBUG:
            open("/tmp/face_{}.vtk".format(face_idx), 'w').write(
                to_vtk(
                    MultiLineString([LineString(l)
                                     for l in linework]).wkb_hex))
            debug_files.append("/tmp/face_{}.vtk".format(face_idx))

        # project the linework onto the (v, z) frame, keeping a map from
        # rounded 2D coordinates back to the original 3D points
        node_map = {(round(dot(p - origin, v),
                           PRECI), round(dot(p - origin, z), PRECI)): p
                    for e in linework for p in e}
        linework = [
            LineString([(round(dot(e[0] - origin, v),
                               PRECI), round(dot(e[0] - origin, z), PRECI)),
                        (round(dot(e[1] - origin, v),
                               PRECI), round(dot(e[1] - origin, z), PRECI))])
            for e in linework
        ]

        if DEBUG:
            # sanity checks: no zero-length, no duplicate, no dangling line
            bug = 0
            for i, li in enumerate(linework):
                if li.length <= 0:
                    print(('zero length line', i, li.wkt))
                    bug = True
                    break
                found = False
                for j, lj in enumerate(linework):
                    if i!=j and (not (lj.coords[0] != li.coords[0] or lj.coords[1] != li.coords[1]) \
                            or not (lj.coords[0] != li.coords[1] or lj.coords[1] != li.coords[0])):
                        open(
                            "/tmp/dup_line_{}_face_{}.vtk".format(
                                bug, face_idx), 'w').write(
                                    to_vtk(
                                        MultiLineString([
                                            LineString(linework_sav[j])
                                        ]).wkb_hex))
                        print(('duplicate line', li.wkt, lj.wkt))
                        bug += 1
                    if i != j and li.coords[1] == lj.coords[0] or li.coords[
                            1] == lj.coords[1]:
                        found = True
                if not found:
                    print(MultiLineString(linework).wkt)
                    bug += 1
            if bug:
                print(('open', MultiLineString(linework).wkt))
                assert (False)

        # the domain is the union of the quads built on triangle edges;
        # polygonized triangles inside it belong to the volume proper,
        # the others to terminations
        domain = [
            Polygon([
                nodes[e[0]].coords[0], nodes[e[0]].coords[1],
                nodes[e[1]].coords[1], nodes[e[1]].coords[0]
            ]) for e in triangle_edges if e in face_edges
        ]
        domain = unary_union([
            Polygon([(round(dot(p - origin, v),
                            PRECI), round(dot(p - origin, z), PRECI))
                     for p in array(dom.exterior.coords)]) for dom in domain
        ])

        polygons = list(polygonize(linework))
        domain_tri = []
        term_tri = []
        for p in polygons:
            p = p if p.exterior.is_ccw else Polygon(p.exterior.coords[::-1])
            assert (p.exterior.is_ccw)
            for t in tessellate(p):
                tri = t.exterior.coords
                # map the 2D triangle back to 3D, honoring face orientation
                q = Polygon([node_map[tri[0]], node_map[tri[1]], node_map[tri[2]]]) \
                    if direct_orientation else \
                    Polygon([node_map[tri[2]], node_map[tri[1]], node_map[tri[0]]])
                if Point(average(tri, (0, ))).intersects(domain):
                    domain_tri.append(q)
                else:
                    term_tri.append(q)

        result += domain_tri
        faces[(hl, hr)] += domain_tri

        top_lines = [l for l in face_lines if l.side == Line.TOP]
        bottom_lines = [l for l in face_lines if l.side == Line.BOTTOM]
        end_lines = {
            tuple(nodes[n].coords): holes[n]
            for n in list(ends.keys())
        }
        if DEBUG:
            open('/tmp/top_lines_face_{}.vtk'.format(face_idx), 'w').write(
                to_vtk(MultiLineString([l.points for l in top_lines]).wkb_hex))
            open('/tmp/bottom_lines_face_{}.vtk'.format(face_idx), 'w').write(
                to_vtk(
                    MultiLineString([l.points for l in bottom_lines]).wkb_hex))
            open('/tmp/offsets_bis.vtk', 'w').write(
                to_vtk(
                    MultiLineString([
                        LineString([k, v]) for k, v in list(offsets.items())
                    ]).wkb_hex))

        # create terminations
        terms = []
        # boundary (unpaired) half-edges of the termination triangles
        boundary_edges = set()
        for t in term_tri:
            for s, e in zip(t.exterior.coords[:-1], t.exterior.coords[1:]):
                if (e, s) in boundary_edges:
                    boundary_edges.remove((e, s))
                else:
                    boundary_edges.add((s, e))
        for t in term_tri:
            # skip termination triangles touching the domain
            share = False
            for d in domain_tri:
                if share_an_edge(t, d):
                    share = True
                    break
            if share:
                continue
            terms.append(t)
            faces[(hl, hr)] += [t]
            terms.append(
                Polygon(offset_coords(offsets, t.exterior.coords[::-1])))
            for s in zip(t.exterior.coords[:-1], t.exterior.coords[1:]):
                if s in boundary_edges:
                    if (is_segment(s, top_lines) or is_segment(s, bottom_lines) or s in end_lines)\
                            and s[0] in offsets and s[1] in offsets:
                        terms.append(Polygon([offsets[s[0]], s[1], s[0]]))
                        terms.append(
                            Polygon([offsets[s[0]], offsets[s[1]], s[1]]))
                    if (s[1], s[0]) in end_lines:
                        terms.append(Polygon([s[1], s[0], offsets[s[1]]]))
                        terms.append(
                            Polygon([s[0], offsets[s[0]], offsets[s[1]]]))
                        faces[tuple(
                            sorted((end_lines[(s[1], s[0])],
                                    other_hole)))] += terms[-2:]
        termination += terms

    if DEBUG:
        open("/tmp/faces.obj", 'w').write(to_obj(MultiPolygon(result).wkb_hex))
        open("/tmp/termination.obj",
             'w').write(to_obj(MultiPolygon(termination).wkb_hex))

    if len(result):

        top_lines = [l for l in lines if l.side == Line.TOP]
        bottom_lines = [l for l in lines if l.side == Line.BOTTOM]
        # find open faces (top and bottom): half-edges of result triangles
        # not shared by two triangles
        open_edges = set()
        for t in result:
            for s, e in zip(t.exterior.coords[:-1], t.exterior.coords[1:]):
                if (e, s) in open_edges:
                    open_edges.remove((e, s))
                else:
                    open_edges.add((s, e))
        top_linework = []
        bottom_linework = []
        for e in open_edges:
            if is_segment(e, top_lines):
                bottom_linework.append((tuple(e[0]), tuple(e[1])))
            elif is_segment(e, bottom_lines):
                top_linework.append((tuple(e[0]), tuple(e[1])))

        if DEBUG:
            open("/tmp/linework_top_unm.vtk", 'w').write(
                to_vtk(
                    MultiLineString([LineString(e)
                                     for e in top_linework]).wkb_hex))
            open("/tmp/linework_bottom_unm.vtk", 'w').write(
                to_vtk(
                    MultiLineString([LineString(e)
                                     for e in bottom_linework]).wkb_hex))

        # linemerge top and bottom, there will be open rings that need to be
        # closed since we did only add linework for faces
        for face, side in zip(('top', 'bottom'),
                              (top_linework, bottom_linework)):
            merged = linemerge(side)

            if DEBUG:
                open("/tmp/linework_%s.vtk" % (face), 'w').write(
                    to_vtk(
                        MultiLineString([LineString(e)
                                         for e in merged]).wkb_hex))
            face_triangles = []
            for m in merged:
                if has_proper_2d_topology(m):
                    node_map = {(round(x[0], PRECI), round(x[1], PRECI)): x
                                for x in m}
                    p = Polygon([(round(x[0], PRECI), round(x[1], PRECI))
                                 for x in m])
                    p = p if p.exterior.is_ccw else Polygon(
                        p.exterior.coords[::-1])
                    assert (p.exterior.is_ccw)
                    for t in tessellate(p):
                        tri = t.exterior.coords
                        # bottom faces keep the ccw order, top faces are
                        # flipped so normals point outward
                        q = Polygon([node_map[tri[0]], node_map[tri[1]], node_map[tri[2]]]) \
                            if face == 'bottom' else \
                            Polygon([node_map[tri[2]], node_map[tri[1]], node_map[tri[0]]])
                        result.append(q)
                        face_triangles.append(q)
            if DEBUG:
                open("/tmp/face_{}.obj".format(face),
                     'w').write(to_obj(MultiPolygon(face_triangles).wkb_hex))

    # adds isolated nodes terminations
    for n, l in list(ends.items()):
        if len(l) == 2:
            node = nodes[n]
            A, B, C = array(node.coords[0]), array(l[0].coords[0]), array(
                l[1].coords[0])
            k1, k2 = tuple(sorted(
                (holes[n],
                 end_holes[n][0]))), tuple(sorted((holes[n], end_holes[n][1])))
            # make sure (l[0], l[1]) turns counter-clockwise around the node
            if dot(cross(B - A, C - A), array((0., 0., 1.))) <= 0:
                l = list(reversed(l))
                k1, k2 = k2, k1
            termination += [
                Polygon([node.coords[0], l[0].coords[0], l[1].coords[0]]),
                Polygon([l[1].coords[-1], l[0].coords[-1], node.coords[-1]]),
                Polygon([node.coords[0], node.coords[1], l[0].coords[0]]),
                Polygon([node.coords[1], l[0].coords[1], l[0].coords[0]]),
                Polygon([l[1].coords[0], node.coords[1], node.coords[0]]),
                Polygon([l[1].coords[0], l[1].coords[1], node.coords[1]]),
                Polygon([l[0].coords[0], l[0].coords[1], l[1].coords[0]]),
                Polygon([l[0].coords[1], l[1].coords[1], l[1].coords[0]])
            ]
            assert (len(end_holes[n]) == 2)
            faces[k1] += [
                Polygon([node.coords[0], node.coords[1], l[0].coords[0]]),
                Polygon([node.coords[1], l[0].coords[1], l[0].coords[0]]),
            ]
            faces[k2] += [
                Polygon([l[1].coords[0], node.coords[1], node.coords[0]]),
                Polygon([l[1].coords[0], l[1].coords[1], node.coords[1]]),
            ]

    result += termination

    if DEBUG:
        for hp, tri in faces.items():
            open("/tmp/face_{}_{}.obj".format(hp[0], hp[1]),
                 'w').write(to_obj(MultiPolygon([t for t in tri]).wkb_hex))

    # decompose volume in connected components: two triangles are
    # connected when they share a half-edge in opposite directions
    half_edges = {}
    graph = {i: set() for i in range(len(result))}
    for ip, p in enumerate(result):
        for s, e in zip(p.exterior.coords[:-1], p.exterior.coords[1:]):
            if (e, s) in half_edges:
                o = half_edges[(e, s)]
                graph[o].add(ip)
                graph[ip].add(o)
                del half_edges[(e, s)]
            else:
                half_edges[(s, e)] = ip

    def pop_connected(n, graph):
        # iterative flood fill (a recursive version could exceed the
        # recursion limit on large components); pops visited nodes
        connected = set([n])
        stack = [n]
        while stack:
            m = stack.pop()
            for ng in graph.pop(m, ()):
                if ng not in connected:
                    connected.add(ng)
                    stack.append(ng)
        return connected

    connected = []
    while len(graph):
        n = next(iter(graph))
        connected.append(pop_connected(n, graph))

    for i, c in enumerate(connected, 1):
        face1 = []
        face2 = []
        face3 = []
        triangles = [result[k] for k in c]
        res = MultiPolygon(triangles)

        for f in faces[(sorted_holes[0], sorted_holes[1])]:
            if f in triangles:
                face1.append(f)
        for f in faces[(sorted_holes[1], sorted_holes[2])]:
            if f in triangles:
                face2.append(f)
        for f in faces[(sorted_holes[0], sorted_holes[2])]:
            if f in triangles:
                face3.append(f)

        if DEBUG:
            # face1/2/3 are plain lists here: wrap them in MultiPolygon
            # before export (calling .wkb_hex on the list raised)
            open("/tmp/face1_tr_%d.obj" % (i),
                 'w').write(to_obj(MultiPolygon(face1).wkb_hex))
            open("/tmp/face2_tr_%d.obj" % (i),
                 'w').write(to_obj(MultiPolygon(face2).wkb_hex))
            open("/tmp/face3_tr_%d.obj" % (i),
                 'w').write(to_obj(MultiPolygon(face3).wkb_hex))
            open("/tmp/volume_tr.obj", 'w').write(to_obj(res.wkb_hex))
            # check volume is closed (every half-edge paired)
            unmatched = set()
            for p in res:
                for s, e in zip(p.exterior.coords[:-1], p.exterior.coords[1:]):
                    if (e, s) in unmatched:
                        unmatched.remove((e, s))
                    else:
                        unmatched.add((s, e))
            if len(unmatched):
                print("volume is not closed", unmatched)
                open("/tmp/unconnected_edge.vtk", 'w').write(
                    to_vtk(
                        MultiLineString([LineString(e)
                                         for e in unmatched]).wkb_hex))

            # check volume is positive (sum of signed tetrahedron volumes)
            volume = 0
            for p in res:
                r = p.exterior.coords
                v210 = r[2][0] * r[1][1] * r[0][2]
                v120 = r[1][0] * r[2][1] * r[0][2]
                v201 = r[2][0] * r[0][1] * r[1][2]
                v021 = r[0][0] * r[2][1] * r[1][2]
                v102 = r[1][0] * r[0][1] * r[2][2]
                v012 = r[0][0] * r[1][1] * r[2][2]
                volume += (1. / 6.) * (-v210 + v120 + v201 - v021 - v102 +
                                       v012)
            if volume <= 0:
                print("volume is", volume)

        # translate back to the original position and stamp the SRID
        res = translate(res, translation[0], translation[1], translation[2])
        geos.lgeos.GEOSSetSRID(res._geom, srid_)

        face1 = translate(MultiPolygon(face1), translation[0], translation[1],
                          translation[2])
        geos.lgeos.GEOSSetSRID(face1._geom, srid_)

        face2 = translate(MultiPolygon(face2), translation[0], translation[1],
                          translation[2])
        geos.lgeos.GEOSSetSRID(face2._geom, srid_)

        face3 = translate(MultiPolygon(face3), translation[0], translation[1],
                          translation[2])
        geos.lgeos.GEOSSetSRID(face3._geom, srid_)

        # NOTE(review): the space before ';' and the 'MULTIPOLYGONZ'
        # spelling look unusual for EWKT -- confirm against the consumer
        empty_mp = "SRID={} ;MULTIPOLYGONZ EMPTY".format(srid_)
        yield (res.wkb_hex if not res.is_empty else empty_mp,
               face1.wkb_hex if not face1.is_empty else empty_mp,
               face2.wkb_hex if not face2.is_empty else empty_mp,
               face3.wkb_hex if not face3.is_empty else empty_mp)

    for f in debug_files:
        os.remove(f)
Example #2
0
    def createConvexPath(self, pair, FID_ij):
        #pr = cProfile.Profile()
        #pr2 = cProfile.Profile()

        fd_fullPayload = 5 * 5280
        fd_empty = 10 * 5280
        fd_delivery = 3.33 * 5280

        #print pair
        odPointsList = ((pair[0].x, pair[0].y), (pair[1].x, pair[1].y))
        st_line = LineString(odPointsList)
        if self.indi == "FF":
            if st_line.length > fd_fullPayload:
                return 0, 0, None
        elif self.indi == "FD":
            if st_line.length > fd_delivery:
                return 0, 0, None
        labeledObstaclePoly = []
        totalConvexPathList = {}

        dealtArcList = {}
        totalConvexPathList[odPointsList] = LineString(odPointsList)

        terminate = 0
        idx_loop1 = 0
        #sp_l_set = []
        time_loop1 = 0
        time_contain2 = 0
        time_crossingDict = 0
        time_convexLoop = 0
        time_impedingArcs = 0
        time_spatialFiltering = 0
        time_loop1_crossingDict = 0
        time_buildConvexHulls = 0
        while terminate == 0:
            t1s = time.time()
            idx_loop1 += 1

            t6s = time.time()

            totalGrpah = self.createGraph(totalConvexPathList.keys())
            spatial_filter_n = networkx.dijkstra_path(totalGrpah,
                                                      odPointsList[0],
                                                      odPointsList[1])
            spatial_filter = []
            for i in xrange(len(spatial_filter_n) - 1):
                spatial_filter.append(
                    [spatial_filter_n[i], spatial_filter_n[i + 1]])

            crossingDict = defaultdict(list)

            for line in spatial_filter:
                Line = LineString(line)
                for obs in self.obstaclesPolygons:
                    if Line.crosses(obs):
                        if obs not in labeledObstaclePoly:
                            labeledObstaclePoly.append(obs)

                        crossingDict[tuple(line)].append(obs)

            t6e = time.time()
            time_spatialFiltering += t6e - t6s

            if len(crossingDict.keys()) == 0:

                terminate = 1
                continue
            else:
                t7s = time.time()
                for tLine in crossingDict.keys():
                    #cLine = list(tLine)
                    if dealtArcList.has_key(tLine):
                        try:
                            del totalConvexPathList[tLine]
                        except:
                            del totalConvexPathList[(tLine[1], tLine[0])]
                        continue
                    else:
                        dealtArcList[tLine] = LineString(list(tLine))
                        try:
                            del totalConvexPathList[tLine]
                        except:
                            del totalConvexPathList[(tLine[1], tLine[0])]
                        containingObs = []
                        for obs in crossingDict[tLine]:

                            convexHull = self.createConvexhull(obs, tLine)
                            self.splitBoundary(totalConvexPathList, convexHull)

                            convexHull = self.createConvexhull(
                                obs, odPointsList)
                            self.splitBoundary(totalConvexPathList, convexHull)
                            convexHull2 = self.createConvexhull(obs)
                            if convexHull2.contains(Point(tLine[0])):
                                containingObs.append(obs)
                            elif convexHull2.contains(Point(tLine[1])):
                                containingObs.append(obs)
                        if len(containingObs) != 0:  #SPLIT
                            subConvexPathList = {}
                            vi_obs = MultiPolygon([x for x in containingObs])
                            containedLineCoords = list(tLine)
                            fromX = containedLineCoords[0][0]
                            fromY = containedLineCoords[0][1]
                            toX = containedLineCoords[1][0]
                            toY = containedLineCoords[1][1]
                            fxA = (fromY - toY) / (fromX - toX)
                            fxB = fromY - (fxA * fromX)
                            minX = vi_obs.bounds[0]
                            maxX = vi_obs.bounds[2]
                            split_line = LineString([
                                (min(minX, fromX,
                                     toX), fxA * min(minX, fromX, toX) + fxB),
                                (max(maxX, fromX,
                                     toX), fxA * max(maxX, fromX, toX) + fxB)
                            ])

                            for obs in containingObs:
                                s1, s2 = self.splitPolygon(split_line, obs)
                                dividedObsPoly = []
                                #to deal with multipolygon
                                a = s1.intersection(obs)
                                b = s2.intersection(obs)
                                if a.type == "Polygon":
                                    dividedObsPoly.append(a)
                                else:
                                    for o in a.geoms:
                                        if o.type == "Polygon":
                                            dividedObsPoly.append(o)
                                if b.type == "Polygon":
                                    dividedObsPoly.append(b)
                                else:
                                    for o2 in b.geoms:
                                        if o2.type == "Polygon":
                                            dividedObsPoly.append(o2)

                                for obs2 in dividedObsPoly:
                                    for pt in tLine:
                                        convexHull = self.createConvexhull(
                                            obs2, [pt])
                                        self.splitBoundary(
                                            subConvexPathList, convexHull)
                            subVertices = []
                            for line in subConvexPathList:
                                subVertices.extend(line)
                            subVertices = list(set(subVertices))
                            containingObsVertices = []
                            for obs in containingObs:
                                containingObsVertices.extend(
                                    list(obs.exterior.coords))
                            subVertices = [
                                x for x in subVertices
                                if x in containingObsVertices
                            ]
                            deleteList = []
                            for line in subConvexPathList:
                                chk_cross = 0
                                for obs in containingObs:
                                    if subConvexPathList[line].crosses(obs):
                                        chk_cross = 1
                                if chk_cross == 1:
                                    deleteList.append(line)
                            for line in deleteList:
                                del subConvexPathList[line]
                                #subConvexPathList.remove(line)
                            pairList = []
                            for i in range(len(subVertices)):
                                for j in range(i + 1, len(subVertices)):
                                    pairList.append(
                                        (subVertices[i], subVertices[j]))
                            for i in pairList:
                                Line = LineString(i)
                                chk_cross = 0
                                for obs in containingObs:
                                    if Line.crosses(obs):
                                        chk_cross = 1
                                    elif Line.within(obs):
                                        chk_cross = 1
                                if chk_cross == 0:
                                    subConvexPathList[i] = Line
                                    #subConvexPathList.append(i)
                            buffer_st_line = split_line.buffer(0.1)
                            deleteList = []
                            for line in subConvexPathList:
                                if buffer_st_line.contains(
                                        subConvexPathList[line]):
                                    deleteList.append(line)
                            for line in deleteList:
                                if subConvexPathList.has_key(line):
                                    del subConvexPathList[line]
                            #subConvexPathList = [x for x in subConvexPathList if x not in deleteList]
                            for line in subConvexPathList:
                                if not totalConvexPathList.has_key(line):
                                    if not totalConvexPathList.has_key(
                                        (line[1], line[0])):
                                        totalConvexPathList[
                                            line] = subConvexPathList[
                                                line]  #if line not in totalConvexPathList:
                                    #if [line[1], line[0]] not in totalConvexPathList:
                                    #totalConvexPathList.append(line)

                #w = shapefile.Writer(shapefile.POLYLINE)
                #w.field('nem')
                #for line in totalConvexPathList:
                #w.line(parts=[[ list(x) for x in line ]])
                #w.record('ff')
                #w.save(self.path + "graph2_" + str(idx_loop1) + self.version_name)
                t7e = time.time()
                time_loop1_crossingDict += t7e - t7s
                #new lines
                labeled_multyPoly = MultiPolygon(
                    [x for x in labeledObstaclePoly])
                convexHull = self.createConvexhull(labeled_multyPoly,
                                                   odPointsList)
                self.splitBoundary(totalConvexPathList, convexHull)
                #new lines end

                #impededPathList
                t5s = time.time()
                impededPathList = {}
                for line in totalConvexPathList:
                    for obs in labeledObstaclePoly:
                        if totalConvexPathList[line].crosses(obs):
                            impededPathList[line] = totalConvexPathList[line]
                            break
                t5e = time.time()
                time_impedingArcs += t5e - t5s
                for line in impededPathList:
                    del totalConvexPathList[line]

                terminate2 = 0
                idx_loop2 = 0
                t1e = time.time()
                time_loop1 += t1e - t1s
                while terminate2 == 0:
                    idx_loop2 += 1

                    deleteList = []
                    crossingDict = defaultdict(list)

                    for line in dealtArcList:
                        if impededPathList.has_key(line):
                            del impededPathList[line]
                        elif impededPathList.has_key((line[1], line[0])):
                            del impededPathList[(line[1], line[0])]

                    t3s = time.time()
                    #pr.enable()
                    for line in impededPathList:
                        for obs in labeledObstaclePoly:
                            if impededPathList[line].crosses(obs):
                                crossingDict[line].append(obs)

                    t3e = time.time()
                    time_crossingDict += t3e - t3s
                    #at this point, impededArcList should be emptied, as it only contains crossing arcs, and all of them
                    #should be replaced by convex hulls.
                    for line in crossingDict:
                        del impededPathList[line]
                    for line in impededPathList:
                        if not totalConvexPathList.has_key(line):
                            totalConvexPathList[line] = impededPathList[line]
                    impededPathList = {}

                    if len(crossingDict.keys()) == 0:
                        terminate2 = 1
                        continue
                    else:
                        #w = shapefile.Writer(shapefile.POLYLINE)
                        #w.field('nem')
                        #for line in crossingDict:
                        #w.line(parts=[[ list(x) for x in line ]])
                        #w.record('ff')
                        #w.save(self.path + "crossingDict_" + str(idx_loop1) + "_"+ str(idx_loop2) +"_"+ self.version_name)
                        t4s = time.time()

                        for tLine in crossingDict.keys():
                            dealtArcList[tLine] = crossingDict[tLine]
                            containingObs = []
                            for obs in crossingDict[tLine]:
                                chk_contain = 0
                                convexHull2 = self.createConvexhull(obs)
                                if convexHull2.contains(Point(tLine[0])):
                                    containingObs.append(obs)
                                    chk_contain = 1
                                elif convexHull2.contains(Point(tLine[1])):
                                    containingObs.append(obs)
                                    chk_contain = 1
                                if chk_contain == 0:
                                    t10s = time.time()
                                    convexHull = self.createConvexhull(
                                        obs, tLine)
                                    self.splitBoundary(impededPathList,
                                                       convexHull)
                                    t10e = time.time()
                                    time_buildConvexHulls += t10e - t10s

                            if len(containingObs) != 0:  #SPLIT
                                #print "SPLIT"
                                t2s = time.time()
                                subConvexPathList = {}
                                vi_obs = MultiPolygon(
                                    [x for x in containingObs])
                                containedLineCoords = tLine
                                fromX = containedLineCoords[0][0]
                                fromY = containedLineCoords[0][1]
                                toX = containedLineCoords[1][0]
                                toY = containedLineCoords[1][1]
                                fxA = (fromY - toY) / (fromX - toX)
                                fxB = fromY - (fxA * fromX)
                                minX = vi_obs.bounds[0]
                                maxX = vi_obs.bounds[2]
                                split_line = LineString([
                                    (min(minX, fromX, toX),
                                     fxA * min(minX, fromX, toX) + fxB),
                                    (max(maxX, fromX, toX),
                                     fxA * max(maxX, fromX, toX) + fxB)
                                ])

                                for obs in containingObs:
                                    s1, s2 = self.splitPolygon(split_line, obs)
                                    dividedObsPoly = []
                                    #to deal with multipolygon
                                    a = s1.intersection(obs)
                                    b = s2.intersection(obs)
                                    if a.type == "Polygon":
                                        dividedObsPoly.append(a)
                                    else:
                                        for o in a.geoms:
                                            if o.type == "Polygon":
                                                dividedObsPoly.append(o)
                                    if b.type == "Polygon":
                                        dividedObsPoly.append(b)
                                    else:
                                        for o2 in b.geoms:
                                            if o2.type == "Polygon":
                                                dividedObsPoly.append(o2)

                                    for obs2 in dividedObsPoly:
                                        for pt in tLine:
                                            convexHull = self.createConvexhull(
                                                obs2, [pt])
                                            self.splitBoundary(
                                                subConvexPathList, convexHull)
                                subVertices = []
                                for line in subConvexPathList:
                                    subVertices.extend(line)
                                subVertices = list(set(subVertices))
                                containingObsVertices = []
                                for obs in containingObs:
                                    containingObsVertices.extend(
                                        list(obs.exterior.coords))
                                subVertices = [
                                    x for x in subVertices
                                    if x in containingObsVertices
                                ]
                                deleteList = []
                                for line in subConvexPathList:
                                    chk_cross = 0
                                    for obs in containingObs:
                                        if subConvexPathList[line].crosses(
                                                obs):
                                            chk_cross = 1
                                    if chk_cross == 1:
                                        deleteList.append(line)
                                for line in deleteList:
                                    del subConvexPathList[line]

                                pairList = []
                                for i in range(len(subVertices)):
                                    for j in range(i + 1, len(subVertices)):
                                        pairList.append(
                                            (subVertices[i], subVertices[j]))

                                for i in pairList:
                                    Line = LineString(list(i))
                                    chk_cross = 0
                                    for obs in containingObs:
                                        if Line.crosses(obs):
                                            chk_cross = 1
                                        elif Line.within(obs):
                                            chk_cross = 1
                                    if chk_cross == 0:
                                        subConvexPathList[i] = Line

                                buffer_st_line = split_line.buffer(0.1)
                                deleteList = []
                                for line in subConvexPathList:
                                    if buffer_st_line.contains(
                                            subConvexPathList[line]):
                                        deleteList.append(line)
                                for line in deleteList:
                                    del subConvexPathList[line]
                                for line in subConvexPathList:
                                    if not impededPathList.has_key(line):
                                        if not impededPathList.has_key(
                                            (line[1], line[0])):
                                            impededPathList[
                                                line] = subConvexPathList[line]

                                t2e = time.time()
                                time_contain2 += t2e - t2s
                        #pr.disable()
                        for line in dealtArcList:
                            if impededPathList.has_key(line):
                                del impededPathList[line]
                        #impededPathList = [x for x in impededPathList if x not in dealtArcList]
                        t4e = time.time()
                        time_convexLoop += t4e - t4s
                        #end of else
                    #w = shapefile.Writer(shapefile.POLYLINE)
                    #w.field('nem')
                    #for line in impededPathList:
                    #w.line(parts=[[ list(x) for x in line ]])
                    #w.record('ff')
                    #w.save(self.path + "After_graph_" + str(idx_loop1) + "_"+ str(idx_loop2) +"_"+ self.version_name)
                    #end of while2
                for line in impededPathList:
                    if not totalConvexPathList.has_key(line):
                        totalConvexPathList[line] = impededPathList[line]

                #totalConvexPathList.extend(impededPathList)
        totalGraph = self.createGraph(totalConvexPathList.keys())
        esp_n = networkx.dijkstra_path(totalGraph, odPointsList[0],
                                       odPointsList[1])
        esp = []
        for i in range(len(esp_n) - 1):
            esp.append([esp_n[i], esp_n[i + 1]])
        w = shapefile.Writer(shapefile.POLYLINE)
        #w.field('nem')
        #no_edges = 0
        #for line in totalConvexPathList.keys():
        #no_edges += 1
        #w.line(parts=[[ list(x) for x in line ]])
        #w.record('ff')
        #w.save(self.path + "totalpath_" + "%s" % FID_ij )
        #w = shapefile.Writer(shapefile.POLYLINE)
        if self.indi == "FF":
            w.field('nem')
            for line in esp:
                w.line(parts=[[list(x) for x in line]])
                w.record('ff')
            w.save(self.path + "ESP_" + "%s" % FID_ij)
        #targetPysal = pysal.IOHandlers.pyShpIO.shp_file(self.path + "ESP_" + "%s" % FID_ij)
        #targetShp = self.generateGeometry(targetPysal)
        total_length = 0
        for coords in esp:
            line = LineString(coords)
            total_length += line.length

        if self.indi == "FF":
            if total_length <= fd_fullPayload:

                return 1, total_length, self.path + "ESP_" + FID_ij + ".shp"
            else:
                return 0, 0, None
        elif self.indi == 'FD':
            if total_length <= fd_delivery:
                return 1, total_length, None
            else:
                return 0, 0, None
# ---- Example #3 ----
# Scratch snippet: build basic shapely geometries.
from shapely.geometry import Point, MultiPolygon, Polygon
import numpy as np

# Sample point (lon, lat — near Fortaleza, Brazil, judging by the coordinates).
point = Point(-38.561737, -3.736494)
# Empty polygon placeholder ("poligon" spelling kept: it is a module-level name).
poligon = Polygon()
# NOTE(review): removed leftover `import ipdb; ipdb.set_trace()` debugger
# breakpoint, which would halt any non-interactive run of this module.
# The original `MultiPolygon([[]], [])` passed a malformed component list
# (an empty inner sequence has no shell at index 0) and raised inside
# shapely; an empty MultiPolygon is built from an empty sequence instead.
multi = MultiPolygon([])
def main():
    """Generate a masked sea-ice-concentration map for an Antarctic study area.

    Pipeline: read a polygon mask (KML converted to shapefile), reproject it,
    query an ERDDAP server for NSIDC CDR sea-ice concentration data, mask the
    gridded data to the polygon, and render/save a South Polar Stereo plot.

    NOTE(review): relies on module-level imports and helpers not visible in
    this chunk (sys, os, math, fiona, pyproj, urllib3, logging, datetime,
    matplotlib `plt`/`path`, cartopy `ccrs`, shapely `shape`/`transform`,
    `partial`, `keyholemarkup2x`, `getDatasetInfo`, `makemd`, `getDimensions`,
    `getParameterList`, `getDimensionInfo`, `getRequestBounds`, `getData`)
    — confirm against the full module.
    """

    print(sys.argv)

    # Positional CLI arguments; several of these are overwritten by
    # hard-coded test values further down.
    # NOTE(review): the locals `time` and `parameter` shadow common module
    # names (e.g. the stdlib `time` module) inside this function.
    catalog_v = sys.argv[1]
    projection = sys.argv[2]
    datasetId = sys.argv[3]
    entryId = sys.argv[4]
    parameter = sys.argv[5]
    time = sys.argv[6]
    tab = sys.argv[7]
    colorBar = sys.argv[8]

    print(projection)

    print('Starting')

    # Polygon

    # Convert KML file to Shape File
    # (because I had existing code for working with shape files)
    # Wont use this here, but do want to make sure I can get the kml converter working for future use
    # after testing pull straight from the pre-generated shp file

    myShape = keyholemarkup2x('./config/masks/antarctic_bounds/polymask.kml',
                              output='shp')

    #print(myShape)
    # Open Shapefiles, read coverage areas
    # get extents, then query Erddap
    # use shapely to pull data from within polygons

    areaProps = []

    for pol in fiona.open('./config/masks/antarctic_bounds/polymask.shp'):
        areaProps.append(pol['properties'])

    # Build one MultiPolygon from every feature in the mask shapefile.
    Multi = MultiPolygon([
        shape(pol['geometry'])
        for pol in fiona.open('./config/masks/antarctic_bounds/polymask.shp')
    ])
    #Multi.wkt

    polygon = Multi[0]  # we only have one polygon

    # the shapefile data, satellite data and plotting projections are all different
    # this step transforms the shapefile data to the 4326 projection of the satellite data
    # requests to the server need to be in 4326
    #print(polygon.bounds)

    # if working with a projected shapefile instead of kml file
    # see the 06_09_ShapefileLoad notebook for demo of how to work with a shape file that is projected

    epsg4326 = pyproj.Proj(
        init='epsg:4326')  # lon/lat coordinate system (polygon kml file)
    epsg3031 = pyproj.Proj(init='epsg:3031')  # South polar stereo plots)
    epsg3412 = pyproj.Proj(
        init='epsg:3412')  # South polar stereo (data coordinates)
    esri102020 = pyproj.Proj(
        init='esri:102020')  # Antarctic equal area (area calculations)

    #convert polygon to dataset projection
    project = partial(
        pyproj.transform,
        pyproj.Proj(init='epsg:4326'),  # lon/lat coordinate system
        pyproj.Proj(init='epsg:3412'))  # south polar stereo

    p3412 = transform(project,
                      polygon)  # new shapely polygon with new projection

    #print(p3412.bounds) #minx, miny, maxx, maxy

    # Erddap want the bounds in a different order (miny, maxy, minx, maxx)
    p3412boundSwap = [
        p3412.bounds[1], p3412.bounds[0], p3412.bounds[3], p3412.bounds[2]
    ]

    # Convert original polygon to equal area projection
    # Calculate the area of the polygon
    areaProj = partial(
        pyproj.transform,
        pyproj.Proj(init='epsg:4326'),  # lon/lat coordinate system
        pyproj.Proj(init='esri:102020'))  # antarctic equal area

    p102020 = transform(
        areaProj,
        polygon)  # new shapely polygon in new projection, used later?
    p102020_area = transform(
        areaProj,
        polygon).area  # area of projected polygon in meters (projection units)
    study_area_km = p102020_area / 1000000  # area of polygon in km squared
    #print(study_area_km)

    study_area_info = {'study_area_square_km': study_area_km}

    # Get polygon path for data masking and plotting
    polyListx, polyListy = p3412.exterior.xy  # perimeter of polygon
    polyList = list(zip(list(polyListx),
                        list(polyListy)))  # formatted perimeter
    studyAreaPath = path.Path(polyList)  # path for data mask, in EPSG:3031

    # Dataset Info

    # What is the id of the dataset you want to work with?
    # Will use the NSIDC CDR Sea Ice Concentration Monthly when it is loaded in
    dId = 'nsidcSISQSHmday'

    # Get dataset metadata info from ERDDAP in preparation for data request
    erddap_metadata = getDatasetInfo(dId)
    md = makemd(erddap_metadata)
    md["dimensions"] = getDimensions(erddap_metadata)
    md["parameters"] = getParameterList(erddap_metadata)

    # Get valid times for this dataset to later loop through each timestep

    # Customize start time because we know the cdr data actually doesn't start until July 1987
    # (the way the data is published the timestamps go back further but there is no data prior to July 1987)
    timeStart = '1987-07-01T00:00:00Z'
    timeEnd = md["time_coverage_end"]

    validTimesUrl = 'https://polarwatch.noaa.gov/erddap/griddap/' + dId + '.json?time[(' + timeStart + '):1:(' + timeEnd + ')]'

    http = urllib3.PoolManager()

    # NOTE(review): hard-coded test values below clobber the CLI arguments
    # parsed at the top of this function — remove for production use.
    projection = 'epsg3031'
    tab = 'monthly'
    colorBar = 'KT_ice,,,0,1,'
    parameter = 'seaice_conc_monthly_cdr'
    m0 = datetime.now()
    entryId = 'ice-nsidc-cdr'
    datasetId = 'nsidcSISQSHmday'
    time = '2017-01-18T00:00:00Z'

    # apache needs to make these directories here for the permissions to be set correctly
    mapImageDirRoot = '/home/jpatterson/pythonscripts/projected_data_demo/'
    if not os.path.exists(mapImageDirRoot): os.makedirs(mapImageDirRoot)

    logdir = mapImageDirRoot + 'logs'
    if not os.path.exists(logdir): os.makedirs(logdir)

    mapImageDir = mapImageDirRoot + entryId + '/' + datasetId
    if not os.path.exists(mapImageDir):
        try:
            os.makedirs(mapImageDir)  # creates with default perms 0777
            os.chmod(mapImageDir, 0o4775)
        except:
            # NOTE(review): bare `except:` swallows everything, including
            # SystemExit/KeyboardInterrupt; prefer `except OSError:`.
            print('could not make image directory')

    # ** Setup logging system **
    todayStr = datetime.today().strftime("%m_%d_%Y")
    log_fn = mapImageDirRoot + '/logs/catalog.log'
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)
    # create a file handler
    handler = logging.FileHandler(log_fn)
    handler.setLevel(logging.INFO)
    # create a logging format
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    # add the handlers to the logger
    logger.addHandler(handler)  # file writing
    logger.addHandler(logging.StreamHandler())  # print to console
    # to include in log file for records
    scriptstart = datetime.now()
    scriptstartstr = scriptstart.strftime("%Y-%m-%d %H:%M:%S")
    #logger.info('PREVIEW.PY START: ' + scriptstartstr)

    # Slice the ISO-8601 timestamp string into its components.
    mm = time[5:7]
    dd = time[8:10]
    yyyy = time[0:4]
    HH = time[11:13]
    MM = time[14:16]
    timep = yyyy + '_' + mm + '_' + dd + '_' + HH + '_' + MM

    #Keep track of the input info to pass back to the webpage
    req = {
        'projection': projection,
        'datasetId': datasetId,
        'time': time,
        'timep': timep,
        'scriptstarttime': scriptstartstr,
        'entryId': entryId,
        'parameter': parameter,
        'tab': tab,
        'colorBar': colorBar
    }

    dimensions = getDimensions(erddap_metadata)

    # Locate the Y and X coordinate dimensions from the ERDDAP metadata.
    # NOTE(review): `next()` without a default raises StopIteration if no
    # dimension carries the expected axis tag — verify upstream metadata.
    ycoord_dimension = next(
        (dimension for dimension in dimensions if dimension["axis"] == "Y"))
    xcoord_dimension = next(
        (dimension for dimension in dimensions if dimension["axis"] == "X"))
    logger.info(ycoord_dimension["name"])

    #working on rewriting this section to use the structure I am creating for the info
    # stopped here 1/22/18 shutdown
    ycoord_cells = getDimensionInfo(dimensions, ycoord_dimension["name"],
                                    'nValues')
    xcoord_cells = getDimensionInfo(dimensions, xcoord_dimension["name"],
                                    'nValues')

    # use erddap info to determine if latitude is increasing or decreasing
    req["ycoord_range"] = getDimensionInfo(dimensions,
                                           ycoord_dimension["name"],
                                           'actual_range')
    ycoord_avgSpacing = getDimensionInfo(
        dimensions, ycoord_dimension["name"],
        'averageSpacing')  # used in check bounds

    if float(ycoord_avgSpacing) >= 0:
        ycoord_1 = str(float(req["ycoord_range"][0]))
        ycoord_2 = str(float(req["ycoord_range"][1]))
    else:
        ycoord_1 = str(float(req["ycoord_range"][1]))
        ycoord_2 = str(float(req["ycoord_range"][0]))

    req["ycoord_range"] = [ycoord_1, ycoord_2]

    req["xcoord_range"] = getDimensionInfo(dimensions,
                                           xcoord_dimension["name"],
                                           'actual_range')

    #req["ycoord_res"] = getDimensionInfo(dimensions, ycoord_dimension["name"], 'averageSpacing')
    #req["xcoord_res"] = getDimensionInfo(dimensions, xcoord_dimension["name"], 'averageSpacing')

    # Note: Cannot use ERDDAP actual range plus resolution to determine extent (innaccurate)

    # using erddap provided dataset size to determine how large of spacing to use for dataset request.
    # want images to be less than 1000px
    ycoord_sub = float(ycoord_cells) / 800
    xcoord_sub = float(xcoord_cells) / 800

    # pick the larger of the two spacing options, should usually be the lon sub
    if ycoord_sub <= xcoord_sub: sub = xcoord_sub
    else: sub = ycoord_sub
    #sub = (lat_sub + lon_sub)/2         # Use the average of the two
    req["sub"] = str((math.ceil(sub)))  # Round up to largest whole number

    # ?add a check that dataset has data in this region (arctic, antarctic)
    # if not pass something back to the webpage that says it doesn't have data in the area

    # Use polygon maximum bounds to make sure there should be data available in the polygon
    # return the bounds in the format required by the data server
    ycoord_min = ycoord_dimension['valid_range'][0]
    ycoord_max = ycoord_dimension['valid_range'][1]
    xcoord_min = xcoord_dimension['valid_range'][0]
    xcoord_max = xcoord_dimension['valid_range'][1]

    req["bounds"] = [ycoord_max, ycoord_min, xcoord_min, xcoord_max]
    print(req["bounds"])

    #adjust bounds order for erddap if needed
    qbounds = getRequestBounds(md, req)
    print(qbounds)

    # Set the name of the parameter to access
    parameter = 'seaice_conc_monthly_cdr'

    # You can reduce the resolution of the data returned from the server
    # This can be helpful during testing if the dataset is very large
    # Set this to one for full resolution, two for half, and so on
    # NOTE(review): this overrides the computed subsampling factor stored in
    # req["sub"] above; the request below always uses full resolution.
    sub = '1'
    print(req["sub"])

    #shp = shapereader.Reader('/home/jenn/aerdData/shoreline/GSHHS_shp/i/GSHHS_i_L6')

    timestamp = req["time"]
    # NOTE(review): hard-coded test timestamp clobbers req["time"] just above.
    timestamp = '2016-08-17T00:00:00Z'
    print(timestamp)
    m0 = datetime.now()
    dataset = getData(dId, parameter, qbounds, timestamp, sub)
    m1 = datetime.now()

    data3412 = dataset['data']

    m0 = datetime.now()
    # check data file for expected altitude dimension response
    # NOTE(review): if the variable is neither 3-D nor 4-D, `data` is never
    # bound and the code below raises NameError — confirm this is acceptable.
    if len(np.shape(data3412.variables[parameter])) == 3:
        data = data3412.variables[parameter][0, :, :]
    elif len(np.shape(data3412.variables[parameter])) == 4:
        data = data3412.variables[parameter][0, 0, :, :]

    xgrid = data3412.variables['xgrid'][:]
    ygrid = data3412.variables['ygrid'][:]

    xmin = float(xgrid[0])

    xmax = float(xgrid[len(xgrid) - 1])
    ymin = float(ygrid[len(ygrid) - 1])
    ymax = float(
        ygrid[0]
    )  # note: ymax should be bigger than ymin and is towards top of plot

    # output projected bounds corner points for leaflet
    # format to pass is top left, bottom left, bottom right, top right
    top_left = [xmin, ymax]
    bottom_left = [xmin, ymin]
    bottom_right = [xmax, ymin]
    top_right = [xmax, ymax]
    req["boundsProjected"] = [top_left, bottom_left, bottom_right, top_right]

    #xrange = abs(xmin) + abs(xmax)
    #yrange = abs(ymin) + abs(ymax)

    cellWidth = 25000  #25 km = 25000 meters
    cellHeight = 25000

    # Shift grid-cell centers to cell edges for pcolormesh (which expects
    # the coordinates of cell corners, one more than the number of cells).
    xgridMod = xgrid - (cellWidth / 2)

    extraX = xgridMod[len(xgridMod) - 1] + cellWidth
    xgridMod = np.append(xgridMod, extraX)
    ygridMod = ygrid + (cellHeight / 2)
    extraY = ygridMod[len(ygridMod) - 1] - cellHeight
    ygridMod = np.append(ygridMod, extraY)
    #adjust start left by half pixel width (move to the left)
    startLeft = xgrid[0] - (cellWidth / 2)
    #adjust start top by half pixel width ( move towards the top)
    startTop = ygrid[0] + (cellHeight / 2)

    rows = len(ygrid) - 1
    cols = len(xgrid) - 1

    m0 = datetime.now()
    X, Y = np.meshgrid(xgrid, ygrid)  #create the grid
    points = np.array((X.flatten(), Y.flatten())).T  #break it down
    mask = studyAreaPath.contains_points(points).reshape(
        X.shape)  # calc and grid a mask based on polygon path

    datamasked = np.ma.masked_where(mask != True,
                                    data)  # create masked data array

    study_area_data_cells = datamasked.count()
    print(study_area_data_cells)  #print(np.asscalar(study_area_data_cells))

    try:
        mymin = np.min(datamasked[~datamasked.mask])
    except ValueError:
        print('no data within polygon for this timestamp')
        print(
            'either before start of data (pre 1987) or all ice concentration values are 0 (summertime)'
        )
    else:
        if "num_data_cells" not in study_area_info:
            print("some ice in polygon, can calculate number of cells")
            # NOTE(review): np.asscalar was removed in NumPy >= 1.23;
            # use `study_area_data_cells.item()` instead.
            study_area_info["num_data_cells"] = np.asscalar(
                study_area_data_cells)

    # fill data = 255, land = 254, coastline = 253, lakes = 252
    # check for any non-data values
    # NOTE(review): np.where returns a tuple of index arrays; these loops
    # iterate over that tuple and test array truthiness, which raises for
    # multi-element results — verify intent (likely meant len(fillSpot[0])).
    fillSpot = np.where(datamasked == 255)
    for i in range(len(fillSpot)):
        if fillSpot[i]: print('FOUND A FILL VALUE')
    landSpot = np.where(datamasked == 254)
    for i in range(len(landSpot)):
        if landSpot[i]: print('FOUND A LAND VALUE')
    coastSpot = np.where(datamasked == 253)
    for i in range(len(coastSpot)):
        if coastSpot[i]: print('FOUND A COAST VALUE')
    lakeSpot = np.where(datamasked == 252)
    for i in range(len(lakeSpot)):
        if lakeSpot[i]: print('FOUND A LAKE VALUE')
    m1 = datetime.now()
    print(m1 - m0)

    epsg3412 = ccrs.epsg(3412)

    #show all erddap data within study area
    thisproj = ccrs.SouthPolarStereo()
    # Notes on cartopy map projection options
    # cannot directly specify the projection with a proj4 string or a crs id
    # different projections have different options that can be passed to them
    # south polar stereo, north polar stereo and plate caree are the PW standard map projections
    # I haven't been passing other options to the plots but now that they will go on maps I may have to.
    # Albers is an option and I would have to pass it some options to get it centered on alaska I think.
    # Documentation says that specifying crs with espg via epsg.io should work. Not working for espg 3031 or 3412 though

    fig = plt.figure()
    ax1 = plt.axes(projection=thisproj)  # set projection to sps
    ax1.set_global()
    '''
    #add shoreline shape file as plate carree
    for record, geometry in zip(shp.records(), shp.geometries()):
        ax1.add_geometries([geometry], ccrs.PlateCarree(), facecolor='lightgray',
                          edgecolor='black')
    '''
    dataplt = ax1.pcolormesh(xgridMod,
                             ygridMod,
                             datamasked,
                             transform=epsg3412,
                             vmin=0.0,
                             vmax=1.0)

    #ax1.set_extent([-3100000, -1200000, 300000, 2300000], thisproj)
    print(ymin, ymax, xmin, xmax)

    # set_extent is (x0, x1, y0, y1)
    #ax1.set_extent([ xmin-550000, xmax+550000, ymin-550000, ymax+550000], thisproj) # expand plot bounds
    ax1.set_extent(
        [xmin, xmax, ymin, ymax],
        thisproj)  # a little off only because i made the polygon by hand
    #ax1.gridlines(alpha='0.3')
    #ax1.outline_patch.set_linewidth(0.5)
    # NOTE(review): outline_patch/background_patch were removed in cartopy
    # >= 0.20 (use ax.spines['geo'] / ax.patch) — confirm pinned version.
    ax1.outline_patch.set_visible(False)
    ax1.background_patch.set_visible(False)
    #ax1.coastlines(color='red')
    #ax1.outline_patch.set_edgecolor('#dddddd')
    #cbar = fig.colorbar(dataplt)
    imagefn = mapImageDirRoot + 'testoutput_1500' + timestamp + '.png'
    print(imagefn)
    m1 = datetime.now()
    print(m1 - m0)
    plt.savefig(imagefn, dpi=300, bbox_inches='tight', transparent=True)
    m1 = datetime.now()
    print(m1 - m0)
    plt.show()
    plt.cla()
    plt.clf()
    plt.close()
# ---- Example #5 ----
def polygons_to_geom_dicts(polygons, skip_invalid=True):
    """
    Converts a Polygons element into a list of geometry dictionaries,
    preserving all value dimensions.

    For array conversion the following conventions are applied:

    * Any nan separated array are converted into a MultiPolygon
    * Any array without nans is converted to a Polygon
    * If there are holes associated with a nan separated array
      the holes are assigned to the polygons by testing for an
      intersection
    * If any single array does not have at least three coordinates
      it is skipped by default
    * If skip_invalid=False and an array has less than three
      coordinates it will be converted to a LineString
    """
    # Fast paths: geodataframe rows already carry one geometry per row and
    # the dictionary interface already stores geometry dictionaries.
    interface = polygons.interface.datatype
    if interface == 'geodataframe':
        return [row.to_dict() for _, row in polygons.data.iterrows()]
    elif interface == 'geom_dictionary':
        return polygons.data

    polys = []
    xdim, ydim = polygons.kdims
    has_holes = polygons.has_holes
    holes = polygons.holes() if has_holes else None
    for i, polygon in enumerate(polygons.split(datatype='columns')):
        # Pop the coordinate columns; whatever remains in `polygon` are the
        # value dimensions carried over into the output dictionary.
        array = np.column_stack([polygon.pop(xdim.name), polygon.pop(ydim.name)])
        # Row indices containing NaNs: each one separates two sub-polygons.
        splits = np.where(np.isnan(array[:, :2].astype('float')).sum(axis=1))[0]
        arrays = np.split(array, splits+1) if len(splits) else [array]

        invalid = False
        subpolys = []
        subholes = None
        if has_holes:
            # One list of LinearRings per sub-polygon of this geometry.
            subholes = [[LinearRing(h) for h in hs] for hs in holes[i]]
        for j, arr in enumerate(arrays):
            if j != (len(arrays)-1):
                arr = arr[:-1] # Drop nan

            if len(arr) == 0:
                continue
            elif len(arr) == 1:
                # A single coordinate cannot form a ring; degrade to Point
                # unless invalid geometries are being skipped.
                if skip_invalid:
                    continue
                poly = Point(arr[0])
                invalid = True
            elif len(arr) == 2:
                # Two coordinates degrade to a LineString.
                if skip_invalid:
                    continue
                poly = LineString(arr)
                invalid = True
            elif not len(splits):
                # Single-polygon case: holes (if any) attach directly.
                poly = Polygon(arr, (subholes[j] if has_holes else []))
            else:
                # Multi-polygon case: holes are matched to sub-polygons by
                # positional index here.  NOTE(review): the docstring
                # mentions intersection testing -- presumably that happens
                # upstream in polygons.holes(); confirm.
                poly = Polygon(arr)
                hs = [h for h in subholes[j]] if has_holes else []
                poly = Polygon(poly.exterior, holes=hs)
            subpolys.append(poly)

        if invalid:
            # Invalid pieces are emitted one dict per sub-geometry rather
            # than merged into a MultiPolygon.
            polys += [dict(polygon, geometry=sp) for sp in subpolys]
            continue
        elif len(subpolys) == 1:
            geom = subpolys[0]
        elif subpolys:
            geom = MultiPolygon(subpolys)
        else:
            continue
        polygon['geometry'] = geom
        polys.append(polygon)
    return polys
Exemple #6
0
    def __init__(self, filename, ds=None, field_parameters=None, crs=None):
        """Build a polygon data selector from a shapefile path, a shapely
        Polygon/MultiPolygon, or a list of shapely polygons.

        Parameters
        ----------
        filename : str, Polygon, MultiPolygon or list
            Source of the polygon geometry.  A string is treated as a
            shapefile path read via fiona.
        ds : Dataset
            Dataset the selection applies to.
        field_parameters : dict
            Extra field parameters forwarded to the base class.
        crs : optional
            Source CRS; for shapefile input it is replaced by the CRS read
            from the file itself.
        """
        validate_object(ds, Dataset)
        validate_object(field_parameters, dict)
        self.src_crs = crs

        if isinstance(filename, str):
            self.filename = filename

            # Pull every feature geometry out of the shapefile.
            with fiona.open(filename, "r") as shapefile:
                geoms = [feat["geometry"] for feat in shapefile]
                self.src_crs = CRS.from_dict(**shapefile.crs)  # shapefile crs

            self._number_features = len(geoms)

            # Reproject each feature into the dataset's CRS.  The EPSG
            # codes are loop invariants, so format them once.
            src_code = f'EPSG:{self.src_crs.to_epsg()}'
            dst_code = f'EPSG:{ds.parameters["crs"].to_epsg()}'
            reprojected = [transform_geom(src_code, dst_code, g)
                           for g in geoms]
            # Outer ring of each feature becomes a shapely Polygon; the
            # unary union of the (possibly invalid) MultiPolygon yields a
            # single clean layer.
            shells = [Polygon(g["coordinates"][0]) for g in reprojected]
            self.polygon = unary_union(MultiPolygon(shells))

        elif isinstance(filename, Polygon):
            # A single ready-made polygon.
            self._number_features = 1
            self.polygon = filename
            if self.src_crs is not None:
                self._reproject_polygon(ds.parameters['crs'])

        elif isinstance(filename, MultiPolygon):
            # A ready-made multi-polygon; union it into one layer.
            self._number_features = len(filename.geoms)
            self.polygon = unary_union(filename)
            if self.src_crs is not None:
                self._reproject_polygon(ds.parameters['crs'])

        elif isinstance(filename, list):
            # Assume a list of shapely polygons.
            self._number_features = len(filename)
            self.polygon = unary_union(MultiPolygon(filename))
            if self.src_crs is not None:
                self._reproject_polygon(ds.parameters['crs'])

        mylog.info(
            f"Number of features in poly object: {self._number_features}")

        # Centroid of the merged geometry defines the selection center.
        centroid_xy = self.polygon.centroid.coords.xy
        self.center = [centroid_xy[0][0], centroid_xy[1][0]]

        super().__init__(self.center, ds, field_parameters, None)
Exemple #7
0
# BUG FIX: the original `shp.close` only referenced the bound method without
# calling it, leaving the shapefile handle open.
shp.close()

w, h = coords[2] - coords[0] - 260, coords[3] - coords[1]

# Lambert azimuthal equal-area basemap roughly centred on North America.
m = Basemap(width=12000000,
            height=8000000,
            resolution='l',
            projection='laea',
            lat_1=35.,
            lat_2=55,
            lat_0=45,
            lon_0=-107.)

_out = m.readshapefile(shapefilename,
                       name='ecoregions',
                       drawbounds=False,
                       color='none',
                       zorder=2)
# One row per ecoregion: its polygon and its ECOCODE name.
df_map = pd.DataFrame({
    'poly': [Polygon(region) for region in m.ecoregions],
    'name': [region['ECOCODE'] for region in m.ecoregions_info]
})
# Project lon/lat observations into map coordinates.
mapped_points = [
    Point(m(mapped_x, mapped_y))
    for mapped_x, mapped_y in zip(ld['lon'], ld['lat'])
]
all_points = MultiPoint(mapped_points)

# prep() builds a prepared geometry for fast repeated contains() tests.
# Iterate .geoms: direct iteration over a MultiPoint was removed in
# Shapely 2.0 (.geoms also works on 1.x).
region_polygons = prep(MultiPolygon(list(df_map['poly'].values)))
region_points = filter(region_polygons.contains, all_points.geoms)
Exemple #8
0
def plotproj(plotdef, data, outdir):
    '''
    Plot a map of the features in *data* under the projection described by
    *plotdef* and save it as a PNG in *outdir*.

    Parameters:
        plotdef (dict): plot definition with keys 'lonmin', 'lonmax',
            'latmin', 'latmax', 'type' ('poly' for filled polygons,
            anything else for lines), 'projstring' and 'filename'.
        data: fiona collection supplying the vector features.
        outdir (str): output directory; created if it does not exist.
    '''
    axes = plt.axes()

    bounds = (plotdef['lonmin'], plotdef['latmin'], plotdef['lonmax'], plotdef['latmax'])

    # The clip box and the projection transform do not depend on the
    # feature, so build them once instead of once per iteration.
    box = Polygon([
        (plotdef['lonmin'], plotdef['latmin']),
        (plotdef['lonmin'], plotdef['latmax']),
        (plotdef['lonmax'], plotdef['latmax']),
        (plotdef['lonmax'], plotdef['latmin']),
    ])
    trans = functools.partial(project_xy, proj_string=plotdef['projstring'])

    for geom in data.filter(bbox=bounds):
        temp_pol = shape(geom['geometry'])

        try:
            temp_pol = temp_pol.intersection(box)
        except Exception:
            # Best effort: skip features whose clipping fails
            # (e.g. invalid geometries).
            continue

        if plotdef['type'] == 'poly':
            if isinstance(temp_pol, MultiPolygon):
                # Iterate .geoms: direct iteration over multi-part
                # geometries was removed in Shapely 2.0.
                polys = [resample_polygon(polygon) for polygon in temp_pol.geoms]
                pol = MultiPolygon(polys)
            else:
                pol = resample_polygon(temp_pol)
        else:
            pol = temp_pol

        proj_geom = transform(trans, pol)

        if plotdef['type'] == 'poly':
            try:
                patch = PolygonPatch(proj_geom, fc=COLOR_LAND, zorder=0)
                axes.add_patch(patch)
            except TypeError:
                # PolygonPatch rejects degenerate/empty geometries.
                pass
        else:
            x, y = proj_geom.xy
            axes.plot(x, y, color=COLOR_COAST, linewidth=0.5)

    # Plot frame: the top and bottom parallels of the bounding box.
    frame = [
        parallel(plotdef['latmin'], plotdef['lonmin'], plotdef['lonmax']),
        parallel(plotdef['latmax'], plotdef['lonmin'], plotdef['lonmax']),
    ]
    for line in frame:
        line = project(line, plotdef['projstring'])
        axes.plot(line[:, 0], line[:, 1], '-k')

    graticule = build_graticule(
        plotdef['lonmin'],
        plotdef['lonmax'],
        plotdef['latmin'],
        plotdef['latmax'],
    )

    # Plot graticule
    for feature in graticule:
        feature = project(feature, plotdef['projstring'])
        axes.plot(feature[:, 0], feature[:, 1], color=COLOR_GRAT, linewidth=0.4)

    # Switch off the axis lines...
    plt.axis('off')
    # ... and additionally switch off the visibility of the axis lines and
    # labels so they can be removed by "bbox_inches='tight'" when saving
    axes.get_xaxis().set_visible(False)
    axes.get_yaxis().set_visible(False)

    # Make sure the plot is not stretched
    axes.set_aspect('equal')

    if not os.path.exists(outdir):
        os.makedirs(outdir)
    plt.savefig(outdir + '/' + plotdef['filename'],
                dpi=400,
                bbox_inches='tight')

    plt.close()
Exemple #9
0
        # Earlier experiment kept for reference: intersect each buffered
        # highway with the protected-area (CHKO) geometry.
        #     chko_geom = shape(chko["geometry"])
        #     for hw in buffered_highways:
        #         if chko_geom.intersects(hw):
        #             print("intersection")
        #             out_geom = hw.intersection(chko_geom)
        #             intersections.append(out_geom)
        # print(intersections)
        schema = {
            'properties': {
                'highway': 'str'
            },
            'geometry': 'MultiPolygon'
        }  # schema of the output file

        # Write every buffered highway geometry into a new GeoPackage layer.
        with fiona.open(os.path.join(cesta, "chko_x_highway.gpkg"),
                        "w",
                        driver="GPKG",
                        schema=schema) as out:
            # build one feature record per geometry
            for g in buffered_highways:
                feature = {
                    'type': 'Feature',
                    'properties': {
                        'highway': 'D8'
                    },
                    'geometry': mapping(
                        MultiPolygon([g])
                    )  # wrap in MultiPolygon: the schema declares
                       # MultiPolygon, so a plain Polygon cannot be written
                }
                out.write(feature)
Exemple #10
0
    def feature_collection(self, ft_name, **kwargs):
        """Retrieve the feature collection for the given feature type.

        Args:
            ft_name (str): the feature name you are interested in.
            **kwargs: Keyword arguments:
                max_features (int, optional): the number of records to get
                attributes (list, tuple, str, optional): the list, tuple or string of attributes you are interested in
                    to have the feature collection.
                filter (list, tuple, str, optional): the list, tuple or string of cql filter
                    (http://docs.geoserver.org/latest/en/user/filter/function_reference.html#filter-function-reference)
                sort_by (list, tuple, str, optional): the list, tuple or string of attributes used to sort resulting
                    collection

        Raises:
            ValueError: if latitude or longitude is out of range or any mandatory parameter is missing.
            AttributeError: if found an unexpected parameter or unexpected type
            Exception: if the service returns a exception
        """
        if not ft_name:
            raise ValueError("Missing feature name.")

        # Reject any keyword that is not part of the accepted set.
        invalid_parameters = set(kwargs) - {"max_features", "attributes", "filter", "sort_by"}

        if invalid_parameters:
            raise AttributeError('invalid parameter(s): {}'.format(invalid_parameters))

        feature_desc = self.describe_feature(ft_name)
        geometry_name = None
        if 'geometry' in feature_desc:
            geometry_name = feature_desc['geometry']['name']

        data = {
            'typeName': ft_name
        }

        if 'max_features' in kwargs:
            data['maxFeatures'] = kwargs['max_features']

        if 'attributes' in kwargs:
            # Normalise list/tuple input into a comma-separated string.
            if isinstance(kwargs['attributes'], (list, tuple)):
                kwargs['attributes'] = ",".join(kwargs['attributes'])
            elif not isinstance(kwargs['attributes'], str):
                raise AttributeError('attributes must be a list, tuple or string')
            if geometry_name is not None:
                if len(kwargs['attributes']) > 0:
                    data['propertyName'] = "{},{}".format(geometry_name, kwargs['attributes'])
            else:
                data['propertyName'] = kwargs['attributes']

        if 'sort_by' in kwargs:
            if isinstance(kwargs['sort_by'], (list, tuple)):
                kwargs['sort_by'] = ",".join(kwargs['sort_by'])
            elif not isinstance(kwargs['sort_by'], str):
                raise AttributeError('sort_by must be a list, tuple or string')
            data['sortBy'] = kwargs['sort_by']

        if 'filter' in kwargs:
            if not isinstance(kwargs['filter'], str):
                raise AttributeError('filter must be a string')
            # "#geom#" in the filter is a placeholder for the layer's
            # geometry column name.
            if geometry_name is not None:
                data['CQL_FILTER'] = kwargs['filter'].replace("#geom#", geometry_name)
            else:
                data['CQL_FILTER'] = kwargs['filter']

        # Build the urlencoded request body.  NOTE(review): falsy values
        # (e.g. max_features=0) are silently dropped here -- confirm intended.
        body = ""
        for key, value in data.items():
            if value:
                body += "&{}={}".format(key, value)
        doc = self._post("{}/{}&request=GetFeature".format(self.host, self.base_path), data=body[1:])

        if 'exception' in doc:
            raise Exception(doc["exception"])

        js = json.loads(doc)

        fc = dict()
        fc['total_features'] = js['totalFeatures']
        fc['total'] = len(js['features'])
        fc['features'] = []
        for item in js['features']:
            # Convert the GML geometry types into shapely geometries.
            if geometry_name is not None:
                if feature_desc['geometry']['type'] == 'gml:Point':
                    feature = {'geometry': Point(item['geometry']['coordinates'][0], item['geometry']['coordinates'][1])}
                elif feature_desc['geometry']['type'] == 'gml:MultiPolygon':
                    polygons = []
                    for polygon in item['geometry']['coordinates']:
                        polygons += [Polygon(lr) for lr in polygon]
                    feature = {'geometry': MultiPolygon(polygons)}
                elif feature_desc['geometry']['type'] == 'gml:Polygon':
                    feature = {'geometry': Polygon(item['geometry']['coordinates'][0])}
                else:
                    raise Exception('Unsupported geometry type.')
            else:
                feature = {}
            feature.update(item['properties'])
            fc['features'].append(feature)
        fc['crs'] = js['crs']
        return fc
    # Store the k-th result in the score list
    ant = ant + 1
    # NOTE(review): {…} is a set literal, so element order is not preserved
    # and equal values collapse -- a tuple/list was probably intended; confirm.
    score.append({ant, kth, coverratio, distanceA})
    # Single-row frame: ant id, camera count, coverage ratio and the
    # minimum detection distances for the three weak points.
    kscore = pd.DataFrame({
        "개미번호": [ant],
        "카메라댓수": [kth],
        "커버율": [coverratio],
        "최소탐지거리A": [distanceA],
        "최소탐지거리B": [distanceB],
        "최소탐지거리C": [distanceC]
    })
    # NOTE(review): DataFrame.append is removed in pandas 2.x; pd.concat
    # is the modern equivalent.
    tscore = tscore.append(kscore)

tscore = tscore.reset_index(drop=True)
# Rank candidates: fewest cameras first, then highest coverage ratio.
best_score = tscore.sort_values(by=['카메라댓수', '커버율'],
                                axis=0,
                                ascending=[True, False])
best_score = best_score.reset_index(drop=True)
selection = best_score.iloc[0, 0]
min_number = best_score.iloc[0, 1]
# NOTE(review): bare expression -- looks up intersect<selection> without
# using the result; confirm whether an assignment was intended.
globals()['intersect{}'.format(selection)]

# Circles with the minimum detection radius around each weak point.
mincircleA = Point(weak1).buffer(best_score.iloc[0, 3])
mincircleB = Point(weak2).buffer(best_score.iloc[0, 4])
mincircleC = Point(weak3).buffer(best_score.iloc[0, 5])

# Combined geometry of the three circles, the base polygon and the best
# candidate's total-view polygon.
circlepoly = MultiPolygon([
    mincircleA, mincircleB, mincircleC, mypoly,
    globals()['totalview{}'.format(best_score.iloc[0, 0])]
])
circlepoly
Exemple #12
0
 def remake_scene_to_resolution(self, scene, resolution):
     """Return a new Scene with every polygon of *scene* re-cut to the
     given *resolution*."""
     resampled = [
         self.cut_polygon_to_resolution(member, resolution)
         for member in scene.scene.geoms
     ]
     return Scene(MultiPolygon(resampled))
Exemple #13
0
 def create_scene_from_specs(self, specs):
     """Build a Scene whose polygons are constructed directly from the
     coordinate sequences in *specs*."""
     return Scene(MultiPolygon([Polygon(spec) for spec in specs]))
Exemple #14
0
    def create_scene(self,
                     total_polygons=30,
                     max_skew=None,
                     max_x=None,
                     max_y=None,
                     max_scale=None,
                     max_rot=None,
                     max_tries=5,
                     min_clearance=0.1,
                     resolution=None):
        """
        Create a Scene instance consisting of a MultiPolygon with
        ``total_polygons`` randomly generated polygons (see
        ``create_random_shape``).

        The scene must be valid: every pair of polygons is separated by at
        least ``min_clearance``.  If, after ``total_polygons * max_tries``
        samples, a full scene cannot be assembled, the scene with the
        maximum number of polygons found so far is returned and a warning
        is printed.

        Args:
            total_polygons (int): number of polygons to include in the scene.
            max_skew (float): maximum skew angle of each shape, default = 30.
            max_x (float): maximum X coordinate of the bounding box, default = 50.
            max_y (float): maximum Y coordinate of the bounding box, default = 50.
            max_scale (float): maximum scaling of each polygon, default = 5.
            max_rot (float): maximum rotation angle of each polygon.
            max_tries (int): multiplier on ``total_polygons`` bounding the
                number of random samples attempted.
            min_clearance (float): minimum distance between all geometries.
            resolution: resolution each polygon is cut to.

        Returns:
            Scene: a Scene instance containing the sampled polygons.
        """
        # NOTE(review): these truthiness guards ignore explicit falsy
        # overrides (e.g. max_rot=0 or min_clearance=0) -- confirm whether
        # `is not None` checks were intended.
        if (max_skew):
            self.max_skew = max_skew
        if (max_x):
            self.max_x = max_x
        if (max_y):
            self.max_y = max_y
        if (max_scale):
            self.max_scale = max_scale
        if (max_rot):
            self.max_rot = max_rot
        if (min_clearance):
            self.min_clearance = min_clearance
        if (resolution):
            self.resolution = resolution
        # Total sampling budget for the loop below.
        max_tries = max_tries * total_polygons
        included_polygons = []
        acceptable_polygons = 0
        tries = 0
        # Seed the scene with one unconditionally accepted polygon.
        # NOTE(review): the seed is not counted in acceptable_polygons and
        # skips the min_area_threshold check, so the final scene holds
        # total_polygons + 1 shapes -- confirm this is intended.
        poly = self.create_random_shape()
        poly = self.cut_polygon_to_resolution(poly, self.resolution)
        included_polygons = [poly]
        mp = MultiPolygon(included_polygons)
        while (acceptable_polygons < total_polygons):
            shape = self.create_random_shape()
            #             print(shape.area)
            #             display(shape)
            if (shape.area > self.min_area_threshold):
                # Accept only shapes clear of everything placed so far.
                distance = shape.distance(mp)
                if (distance > self.min_clearance):
                    #                     print(mp.is_valid)
                    shape = self.cut_polygon_to_resolution(
                        shape, self.resolution)
                    included_polygons += [shape]
                    #                     print(included_polygons)
                    mp = MultiPolygon(included_polygons)
                    acceptable_polygons += 1
            tries += 1
            if (tries > max_tries):
                print(
                    """Failed to Produce a viable solution. Increase the bounding box 
                      (max_x,max_y),reduce the number of polygons in the scene or 
                      try again with the same parameters but higher max_tries multiplier
                      Default (5). 
                      Returning the scene with maximum number of polygons within the 
                      maximum number of tries""")
                return Scene(MultiPolygon(included_polygons))

        print("Scene Generated Sucessfully. Returning the scene")
        return Scene(MultiPolygon(included_polygons))
Exemple #15
0
 def implode(self) -> gpd.GeoDataFrame:
     """Merge every polygon row into a single MultiPolygon and return it
     as a one-row GeoDataFrame in the grid's CRS."""
     merged = MultiPolygon(
         [row.geometry for row in self().itertuples()])
     return gpd.GeoDataFrame({"geometry": merged}, crs=self._grd.crs)
Exemple #16
0
import matplotlib.pyplot as plt
from matplotlib.collections import PatchCollection
from descartes import PolygonPatch
import fiona
from shapely.geometry import Polygon, MultiPolygon, shape

# We can extract the London Borough boundaries by filtering on the AREA_CODE key
mp = MultiPolygon(
    [shape(pol['geometry']) for pol in fiona.open('data/boroughs/boroughs.shp')
    if pol['properties']['AREA_CODE'] == 'LBO'])

# We can now do GIS-ish operations on each borough polygon!
# we could randomize this by dumping the polygons into a list and shuffling it
# or we could define a random colour using fc=np.random.rand(3,)
# available colour maps are here: http://wiki.scipy.org/Cookbook/Matplotlib/Show_colormaps
cm = plt.get_cmap('RdBu')
# Use .geoms: len() of and iteration over a MultiPolygon directly were
# removed in Shapely 2.0; .geoms also works on Shapely 1.x.
num_colours = len(mp.geoms)

fig = plt.figure()
ax = fig.add_subplot(111)
minx, miny, maxx, maxy = mp.bounds
w, h = maxx - minx, maxy - miny
# Pad the axes by 20% of the geometry extent on every side.
ax.set_xlim(minx - 0.2 * w, maxx + 0.2 * w)
ax.set_ylim(miny - 0.2 * h, maxy + 0.2 * h)
ax.set_aspect(1)

# One patch per borough, coloured along the RdBu colour map.
patches = []
for idx, p in enumerate(mp.geoms):
    colour = cm(1. * idx / num_colours)
    patches.append(PolygonPatch(p, fc=colour, ec='#555555', alpha=1., zorder=1))
ax.add_collection(PatchCollection(patches, match_original=True))
Exemple #17
0
 def multipolygon(self) -> MultiPolygon:
     """Return the imploded geometry, promoting a bare Polygon to a
     single-member MultiPolygon."""
     geom = self.implode().iloc[0].geometry
     if isinstance(geom, MultiPolygon):
         return geom
     return MultiPolygon([geom])
Exemple #18
0
def vectorizeRaster(infile, outfile, classes, classfile, weight, nodata,
                    smoothing, band, cartoCSS, axonometrize, nosimple,
                    setNoData, nibbleMask, outvar):
    """Classify a raster band and write each class out as MultiPolygons.

    Reads `band` from `infile`, builds a nodata mask, optionally smooths,
    classifies the values (manual breaks from `classfile`, every unique
    value with classes='all', or `classes` weighted breaks), vectorizes
    each class and writes the result to the ESRI Shapefile `outfile` with
    the break value stored in attribute `outvar`.
    """

    with rasterio.drivers():
        with rasterio.open(infile, 'r') as src:

            try:
                band = int(band)
            except (TypeError, ValueError):
                # int() raises TypeError for None and ValueError for
                # non-numeric strings; anything else should propagate
                # (the original bare `except:` swallowed everything).
                raise ValueError('Band must be an integer')

            inarr = src.read_band(band)
            oshape = src.shape
            oaff = src.affine

            # Burn an explicit nodata value into the masked cells.
            if (type(setNoData) == int
                    or type(setNoData) == float) and hasattr(inarr, 'mask'):
                inarr[np.where(inarr.mask == True)] = setNoData
                nodata = True

            # simplification threshold: one pixel height in map units
            simplest = ((src.bounds.top - src.bounds.bottom) /
                        float(src.shape[0]))

            # handle the different nodata situations
            # (np.bool was removed in NumPy 1.24; plain bool is identical)
            if nodata == 'min':
                maskArr = np.zeros(inarr.shape, dtype=bool)
                maskArr[np.where(inarr == inarr.min())] = True
                inarr = np.ma.array(inarr, mask=maskArr)
                del maskArr
            elif type(nodata) == int or type(nodata) == float:
                maskArr = np.zeros(inarr.shape, dtype=bool)
                maskArr[np.where(inarr == nodata)] = True
                # BUG FIX: the keyword was misspelled `mas=`, so the
                # computed nodata mask was never applied.
                inarr = np.ma.array(inarr, mask=maskArr)
                del maskArr
            elif src.meta['nodata'] is None or np.isnan(
                    src.meta['nodata']) or nodata:
                maskArr = np.zeros(inarr.shape, dtype=bool)
                inarr = np.ma.array(inarr, mask=maskArr)
                del maskArr
            elif (type(src.meta['nodata']) == int or type(src.meta['nodata'])
                  == float) and hasattr(inarr, 'mask'):
                nodata = True

            if nibbleMask:
                # Grow the mask by one pixel in every direction.
                inarr.mask = maximum_filter(inarr.mask, size=3)

    if smoothing and smoothing > 1:
        inarr, oaff = zoomSmooth(inarr, smoothing, oaff)

    else:
        smoothing = 1

    if classfile:
        with open(classfile, 'r') as ofile:
            classifiers = ofile.read().split(',')
            classRas, breaks = classifyManual(
                inarr,
                np.array(classifiers).astype(inarr.dtype))
    elif classes == 'all':
        classRas, breaks = classifyAll(inarr)
    else:
        classRas, breaks = classify(inarr, int(classes), weight)

    # median filtering removes single-pixel speckling
    classRas = median_filter(classRas, size=2)

    # print out cartocss for classes
    if cartoCSS:
        for i in breaks:
            click.echo('[value = ' + str(breaks[i]) +
                       '] { polygon-fill: @class' + str(i) + '}')

    schema = {'geometry': 'MultiPolygon', 'properties': {outvar: 'float'}}

    with fiona.open(outfile, "w", "ESRI Shapefile", schema,
                    crs=src.crs) as outshp:
        tRas = np.zeros(classRas.shape, dtype=np.uint8)
        click.echo("Vectorizing: ", nl=False)
        for i, br in enumerate(breaks):
            click.echo("%d, " % (br), nl=False)
            # Binary raster: cells at or above the current break.
            tRas[np.where(classRas >= i)] = 1
            tRas[np.where(classRas < i)] = 0
            if nodata:
                tRas[np.where(classRas == 0)] = 0
            for feature, shapes in features.shapes(np.asarray(tRas, order='C'),
                                                   transform=oaff):
                if shapes == 1:
                    featurelist = []
                    for c, f in enumerate(feature['coordinates']):
                        # Keep the outer ring plus interior rings with
                        # more than 5 vertices.
                        if len(f) > 5 or c == 0:
                            if axonometrize:
                                f = np.array(f)
                                f[:, 1] += (axonometrize * br)
                            if nosimple:
                                poly = Polygon(f)
                            else:
                                poly = Polygon(f).simplify(
                                    simplest / float(smoothing),
                                    preserve_topology=True)
                            featurelist.append(poly)
                    if len(featurelist) != 0:
                        oPoly = MultiPolygon(featurelist)
                        outshp.write({
                            'geometry': mapping(oPoly),
                            'properties': {
                                outvar: br
                            }
                        })
Exemple #19
0
    def add_patch(
            self,
            multipolygon: Union[MultiPolygon, Polygon],
            expansion_rate: Optional[float] = None,
            target_size: Optional[float] = None,
            nprocs: Optional[int] = None
            ) -> None:
        """Add refinement as a region of fixed size with an optional rate

        Add a refinement based on a region specified by `multipolygon`.
        The fixed `target_size` refinement can be expanded outside the
        region specified by the shape if `expansion_rate` is provided.

        Parameters
        ----------
        multipolygon : MultiPolygon or Polygon
            Shape of the region to use specified `target_size` for
            refinement.
        expansion_rate : float or None, default=None
            Optional rate to use for expanding refinement outside
            the specified shape in `multipolygon`.
        target_size : float or None, default=None
            Fixed target size of mesh to use for refinement in
            `multipolygon`
        nprocs : int or None, default=None
            Number of processors to use in parallel sections of the
            algorithm

        Returns
        -------
        None

        See Also
        --------
        add_feature :
            Add refinement for specified line string
        """

        # TODO: Add pool input support like add_feature for performance

        # TODO: Support other shapes - call buffer(1) on non polygons(?)
        if not isinstance(multipolygon, (Polygon, MultiPolygon)):
            raise TypeError(
                    f"Wrong type \"{type(multipolygon)}\""
                    f" for multipolygon input.")

        if isinstance(multipolygon, Polygon):
            multipolygon = MultiPolygon([multipolygon])

        # Check nprocs
        nprocs = -1 if nprocs is None else nprocs
        nprocs = cpu_count() if nprocs == -1 else nprocs
        _logger.debug(f'Using nprocs={nprocs}')


        # check target size
        target_size = self.hmin if target_size is None else target_size
        if target_size is None:
            # TODO: Is this relevant for mesh type?
            raise ValueError('Argument target_size must be specified if no '
                             'global hmin has been set.')
        if target_size <= 0:
            raise ValueError("Argument target_size must be greater than zero.")

        # For expansion_rate
        if expansion_rate is not None:
            exteriors = [ply.exterior for ply in multipolygon]
            interiors = [
                inter for ply in multipolygon for inter in ply.interiors]

            features = MultiLineString([*exteriors, *interiors])
            # pylint: disable=E1123, E1125
            self.add_feature(
                feature=features,
                expansion_rate=expansion_rate,
                target_size=target_size,
                nprocs=nprocs)

        coords = self.mesh.msh_t.vert2['coord']
        values = self.mesh.msh_t.value

        verts_in = utils.get_verts_in_shape(
            self.mesh.msh_t, shape=multipolygon, from_box=False)

        if len(verts_in):
            # NOTE: Don't continue, otherwise the final
            # destination file might end up being empty!
            values[verts_in, :] = target_size

        # NOTE: unlike raster self.hmin is based on values of this
        # hfun before applying feature; it is ignored so that
        # the new self.hmin becomes equal to "target" specified
#        if self.hmin is not None:
#            values[np.where(values < self.hmin)] = self.hmin
        if self.hmax is not None:
            values[np.where(values > self.hmax)] = self.hmax
        values = np.minimum(self.mesh.msh_t.value, values)
        values = values.reshape(self.mesh.msh_t.value.shape)

        self.mesh.msh_t.value = values
Exemple #20
0
# Close the second outline by repeating its first vertex.
outl2_closed = outl2 + outl2[:1]

r1 = Regions(
    name=name, numbers=numbers, names=names, abbrevs=abbrevs, outlines=outlines
)

# Fixture metadata for the two unit-square test regions.
numbers = [1, 2]
names = {1: "Unit Square1", 2: "Unit Square2"}
abbrevs = {1: "uSq1", 2: "uSq2"}
poly1 = Polygon(outl1)
poly2 = Polygon(outl2)
poly = {1: poly1, 2: poly2}

# Same regions, but defined from shapely Polygons instead of raw outlines.
r2 = Regions(name=name, numbers=numbers, names=names, abbrevs=abbrevs, outlines=poly)

# A single region made of both squares combined into one MultiPolygon.
multipoly = [MultiPolygon([poly1, poly2])]
r3 = Regions(multipoly)
# polygons are automatically closed
outl_multipoly = np.concatenate((outl1_closed, [[np.nan, np.nan]], outl2_closed))

# =============================================================================


def test__subsample():
    # _subsample should densify the two-segment path; the expected
    # longitudes run 1 -> 0 then 0 -> 1 (np.linspace's default 50 samples
    # per segment).
    # NOTE(review): `lat` is computed but never asserted -- possibly an
    # incomplete test; confirm.
    lon, lat = _subsample([[0, 1], [1, 0]])
    res = np.concatenate((np.linspace(1, 0), np.linspace(0, 1)))
    assert np.allclose(lon, res)


# =============================================================================
    print(g.wkt)

###############################################################################
# Multi-part geometry walkthrough (REPL-style bare expressions).
# NOTE(review): `c` is defined in an earlier, unseen part of this script.
c.geoms[0].wkt
# NOTE(review): indexing a collection directly (c[1]) and len() on a
# MultiPolygon below were removed in Shapely 2.0; use .geoms there.
c[1].wkt
###############################################################################
from shapely.geometry import MultiPoint
# A MultiPoint has zero area and zero length; only bounds are meaningful.
points = MultiPoint([(0.0, 0.0), (1.0, 1.0)])
points.area
points.length
points.bounds
###############################################################################
from shapely.geometry import MultiLineString
# Two segments: total length is the sum of the parts.
coords = [((0, 0), (1, 1)), ((-1, 0), (1, 0))]
lines = MultiLineString(coords)
lines.area
lines.length
lines.bounds
len(lines.geoms)
###############################################################################
# Three closed rings used to build a three-part MultiPolygon.
polygon = [(0, 0), (1, 1), (1, 2), (2, 2), (0, 0)]
s = [(10, 0), (21, 1), (31, 2), (24, 2), (10, 0)]
t = [(0, 50), (1, 21), (1, 22), (32, 2), (0, 50)]
from shapely.geometry import Polygon
p_a, s_a, t_a = [Polygon(x) for x in [polygon, s, t]]
from shapely.geometry import MultiPolygon
polygons = MultiPolygon([p_a, s_a, t_a])
len(polygons.geoms)
len(polygons)
polygons.bounds
Exemple #22
0
 def merge_all(self):
     """Collapse the parcel set into a single MultiPolygon holding the whole
     source polygon, refresh derived state, and return the parcels.
     """
     # one part only: the entire original polygon
     self.__parcels = MultiPolygon([self.__polygon])
     # presumably refreshes state derived from __parcels — __update is
     # defined elsewhere in the class; confirm there
     self.__update()
     return self.parcels
Exemple #23
0
    def func_checker(*args, **kwargs):
        """
        A decorator to split and reproject polygon vectors in a GeoDataFrame whose values cross the Greenwich Meridian.

        Begins by examining whether the geometry bounds the supplied cross
        longitude = 0 and if so, proceeds to split the polygons at the
        meridian into new polygons and erase a small buffer to prevent
        invalid geometries when transforming the lons from WGS84 to
        WGS84 +lon_wrap=180 (longitudes from 0 to 360).

        Returns a GeoDataFrame with the new features in a wrap_lon WGS84
        projection if needed, passed through to the wrapped ``func``.
        """
        try:
            poly = kwargs["poly"]
            x_dim = kwargs["x_dim"]
            wrap_lons = kwargs["wrap_lons"]
        except KeyError:
            # nothing to inspect — call through unchanged
            return func(*args, **kwargs)

        if wrap_lons:
            # Sanity-check the longitude range of the dataset.
            # BUG FIX: the second clause previously read `np.max >= 180`,
            # comparing the *function object* np.max to 180 (always False);
            # it must be np.max(x_dim) >= 180.
            if (np.min(x_dim) < 0
                    and np.max(x_dim) >= 360) or (np.min(x_dim) < -180
                                                  and np.max(x_dim) >= 180):
                warnings.warn(
                    "Dataset doesn't seem to be using lons between 0 and 360 degrees or between -180 and 180 degrees."
                    " Tread with caution.",
                    UserWarning,
                    stacklevel=4,
                )
            split_flag = False
            for (index, feature) in poly.iterrows():
                # feature straddles longitude 0?
                if (feature.geometry.bounds[0] <
                        0) and (feature.geometry.bounds[2] > 0):
                    split_flag = True
                    warnings.warn(
                        "Geometry crosses the Greenwich Meridian. Proceeding to split polygon at Greenwich."
                        " This feature is experimental. Output might not be accurate.",
                        UserWarning,
                        stacklevel=4,
                    )

                    # Create a meridian line at Greenwich, split polygons at this line and erase a buffer line
                    if isinstance(feature.geometry, MultiPolygon):
                        union = MultiPolygon(cascaded_union(feature.geometry))
                    else:
                        union = Polygon(cascaded_union(feature.geometry))
                    meridian = LineString([Point(0, 90), Point(0, -90)])
                    buffered = meridian.buffer(0.000000001)
                    split_polygons = split(union, meridian)
                    # TODO: This doesn't seem to be thread safe in Travis CI on macOS. Merits testing with a local machine.
                    buffered_split_polygons = [
                        feat for feat in split_polygons.difference(buffered)
                    ]

                    # Cannot assign iterable with `at` (pydata/pandas#26333) so a small hack:
                    # Load split features into a new GeoDataFrame with WGS84 CRS
                    split_gdf = gpd.GeoDataFrame(
                        geometry=[cascaded_union(buffered_split_polygons)],
                        crs="epsg:4326",
                    )
                    poly.at[[index], "geometry"] = split_gdf.geometry.values

            # Reproject features in WGS84 CSR to use 0 to 360 as longitudinal values
            poly = poly.to_crs(
                "+proj=longlat +ellps=WGS84 +lon_wrap=180 +datum=WGS84 +no_defs"
            )
            crs1 = poly.crs
            if split_flag:
                warnings.warn(
                    "Rebuffering split polygons to ensure edge inclusion in selection",
                    UserWarning,
                    stacklevel=4,
                )
                poly = gpd.GeoDataFrame(poly.buffer(0.000000001),
                                        columns=["geometry"])
                poly.crs = crs1

            kwargs["poly"] = poly

        return func(*args, **kwargs)
Exemple #24
0
def map_communities(infilename,
                    outfilebase,
                    dims,
                    target2,
                    size=30,
                    county=None,
                    drop_isolates=False,
                    use_largest=False):
    """Render every community of a partition as a map image plus a text dump
    of its merged polygon outline.

    Parameters
    ----------
    infilename : str
        File whose last JSON line holds the partition, either as a flat
        {place_id: community} mapping or nested under "data"/"extrap"/"swap"
        (merged into one flat mapping here).
    outfilebase : str
        Prefix for per-community outputs: <base><i>.png (via draw_map) and
        <base><i>.txt (exterior/interior coordinate lists).
    dims, target2, size, county
        Passed through to generate_land / draw_map / get_neighbours;
        target2 is a geometry used to clip boxes.  # NOTE(review): exact
        semantics live in those helpers — confirm there.
    drop_isolates : bool
        If True, skip places that is_isolate() reports as isolated.
    use_largest : bool
        If True, keep only the pieces intersecting the largest connected
        component of each community.
    """

    best_partition = {}
    with open(infilename, 'r') as infile:
        for line in infile:
            # each line replaces the previous: only the last JSON line counts
            best_partition = json.loads(line)
            if "data" in best_partition:
                # merge the three sub-dictionaries into one flat mapping
                tmp = {}
                for k in best_partition["data"]:
                    tmp[k] = best_partition["data"][k]
                for k in best_partition["extrap"]:
                    tmp[k] = best_partition["extrap"][k]
                for k in best_partition["swap"]:
                    tmp[k] = best_partition["swap"][k]
                best_partition = tmp

    # number of distinct community labels
    # NOTE(review): indexing below assumes labels are contiguous 0..vmax-1;
    # a gapped labelling would raise KeyError — confirm upstream guarantee.
    vmax = int(len(set(best_partition.values())))
    vmin = 0  # unused in this function

    com_polys = {i: [] for i in range(vmax)}  # clipped geometry per community
    com_boxes = {i: [] for i in range(vmax)}  # place/box ids per community

    if drop_isolates:
        # only needed (and only defined) when isolates are dropped
        neighbours = get_neighbours(best_partition, dims, target2, size,
                                    county)

    if not county:

        for box_id, box_number, mp in generate_land(dims,
                                                    target2,
                                                    size,
                                                    contains=False):
            if str(box_id) in best_partition:
                if (not drop_isolates) or (drop_isolates and not is_isolate(
                        neighbours, best_partition, str(box_id))):

                    ##draw costal edges
                    # clip boxes that cross the coastline; tiny buffer keeps
                    # geometries valid for the later union
                    if target2.contains(mp):
                        com_polys[best_partition[str(box_id)]].append(
                            mp.buffer(0.0001))
                    else:
                        com_polys[best_partition[str(box_id)]].append(
                            target2.intersection(mp).buffer(0.0001))
                    com_boxes[best_partition[str(box_id)]].append(str(box_id))

    else:
        # county mode: geometries come from the county lookup, not a grid
        for place in best_partition:
            if (not drop_isolates) or (drop_isolates and not is_isolate(
                    neighbours, best_partition, place)):
                mp = MultiPolygon(county.lookup(place))
                if target2.contains(mp):
                    com_polys[best_partition[place]].append(mp.buffer(0.0001))
                else:
                    com_polys[best_partition[place]].append(
                        target2.intersection(mp).buffer(0.0001))
                com_boxes[best_partition[place]].append(place)

    for i in range(vmax):
        if use_largest:
            # keep only pieces touching the community's largest component
            poly = cascaded_union(com_polys[i])
            if poly.geom_type == "MultiPolygon":
                # NOTE(review): iterating a MultiPolygon directly is the
                # Shapely 1.x sequence protocol, removed in 2.0 (.geoms)
                poly = max(poly, key=lambda x: x.area)

            tmp_polys = []
            tmp_boxes = []
            for b in range(len(com_polys[i])):
                if poly.intersects(com_polys[i][b]):
                    tmp_boxes.append(com_boxes[i][b])
                    tmp_polys.append(com_polys[i][b])

            com_polys[i] = tmp_polys
            com_boxes[i] = tmp_boxes

        # render the community map (values all 0: a single colour)
        draw_map({place: 0
                  for place in com_boxes[i]},
                 outfilebase + str(i) + ".png",
                 dims,
                 target2,
                 size=size,
                 county=county)

        # dump the merged outline as [exterior, [interiors...]] per part
        poly = cascaded_union(com_polys[i])
        with open(outfilebase + str(i) + ".txt", 'w') as outfile:

            if poly.geom_type == "Polygon":
                lons, lats = poly.exterior.coords.xy
                x, y = (lons, lats)
                exterior = list(zip(x, y))
                interior = []
                for p in poly.interiors:
                    lons, lats = p.coords.xy
                    x, y = (lons, lats)
                    interior.append(list(zip(x, y)))
                print([exterior, interior], file=outfile)

            if poly.geom_type == "MultiPolygon":
                # NOTE(review): Shapely 1.x iteration — 2.0 needs .geoms
                for p in poly:
                    lons, lats = p.exterior.coords.xy
                    x, y = (lons, lats)
                    exterior = list(zip(x, y))
                    interior = []
                    for ip in p.interiors:
                        lons, lats = ip.coords.xy
                        x, y = (lons, lats)
                        interior.append(list(zip(x, y)))
                    print([exterior, interior], file=outfile)
def _format_shape_osm(bbox, result_NodesFromWays, result_NodesWaysFromRels,
                      item, save_path):
    """format edges, nodes and relations from overpy result objects into shapes

    Closed ways (first node == last node, more than 2 nodes) become polygons
    and open ways become linestrings; both are round-tripped through
    temporary ESRI shapefiles and re-read with geopandas.  Relations are
    assembled into MultiPolygons, stitching fragmented outer rings together
    when the fragments are stored in order.

    Parameters:
        bbox: bounding box; bbox[0]/bbox[1] only tag the temp file names
        result_NodesFromWays: overpy result with ways and their nodes
        result_NodesWaysFromRels: overpy result with relation members
        item: str written into every feature's 'Item' property
        save_path: directory used for the temporary shapefiles

    Returns:
        gdf_all: Geodataframe with Linestrings, Polygons & Multipolygons
    """
    # polygon vs. linestrings in nodes from ways result:

    schema_poly = {
        'geometry': 'Polygon',
        'properties': {
            'Name': 'str:80',
            'Natural_Type': 'str:80',
            'Item': 'str:80'
        }
    }
    schema_line = {
        'geometry': 'LineString',
        'properties': {
            'Name': 'str:80',
            'Natural_Type': 'str:80',
            'Item': 'str:80'
        }
    }
    shapeout_poly = save_path + '/' + str(item) + '_poly_' + str(int(bbox[0])) +\
    '_' + str(int(bbox[1])) + ".shp"
    shapeout_line = save_path + '/' + str(item) + '_line_' + str(int(bbox[0])) +\
    '_' + str(int(bbox[1])) + ".shp"

    # a way is treated as a polygon when it is explicitly closed
    way_poly = []
    way_line = []
    for way in result_NodesFromWays.ways:
        if (way.nodes[0].id == way.nodes[-1].id) & (len(way.nodes) > 2):
            way_poly.append(way)
        else:
            way_line.append(way)

    with fiona.open(shapeout_poly,
                    'w',
                    crs=from_epsg(4326),
                    driver='ESRI Shapefile',
                    schema=schema_poly) as output:
        for way in way_poly:
            geom = mapping(
                geometry.Polygon([node.lon, node.lat] for node in way.nodes))
            prop = {
                'Name': way.tags.get("name", "n/a"),
                'Natural_Type': way.tags.get("natural", "n/a"),
                'Item': item
            }
            output.write({'geometry': geom, 'properties': prop})

    with fiona.open(shapeout_line,
                    'w',
                    crs=from_epsg(4326),
                    driver='ESRI Shapefile',
                    schema=schema_line) as output2:
        for way in way_line:
            geom2 = {
                'type': 'LineString',
                'coordinates': [(node.lon, node.lat) for node in way.nodes]
            }
            prop2 = {
                'Name': way.tags.get("name", "n/a"),
                'Natural_Type': way.tags.get("natural", "n/a"),
                'Item': item
            }
            output2.write({'geometry': geom2, 'properties': prop2})

    # re-read with geopandas and immediately delete the temp shapefiles
    gdf_poly = geopandas.read_file(shapeout_poly)
    for ending in ['.shp', ".cpg", ".dbf", ".prj", '.shx']:
        os.remove(save_path + '/' + str(item) + '_poly_' + str(int(bbox[0])) +
                  '_' + str(int(bbox[1])) + ending)
    gdf_line = geopandas.read_file(shapeout_line)
    for ending in ['.shp', ".cpg", ".dbf", ".prj", '.shx']:
        os.remove(save_path + '/' + str(item) + '_line_' + str(int(bbox[0])) +
                  '_' + str(int(bbox[1])) + ending)

    # add buffer to the lines (0.000045° are ~5m)
    # BUG FIX: the previous loop only rebound its loop variable, leaving the
    # GeoDataFrame untouched; assign the buffered geometries back instead.
    gdf_line['geometry'] = gdf_line.geometry.buffer(0.000045)

    # NOTE(review): DataFrame.append is deprecated (removed in pandas 2.0);
    # kept for compatibility with the file's pinned dependencies.
    gdf_all = gdf_poly.append(gdf_line)

    # detect multipolygons in relations:
    print(
        'Converting results for %s to correct geometry and GeoDataFrame: MultiPolygons'
        % item)

    MultiPoly = []
    multi_tags = []  # tags of the relation each MultiPoly entry came from
    for relation in result_NodesWaysFromRels.relations:
        OuterList = []
        InnerList = []
        PolyList = []
        # get inner and outer parts from overpy results, convert into linestrings
        # to check for closedness later
        for relationway in relation.members:
            if relationway.role == 'outer':
                for way in result_NodesWaysFromRels.ways:
                    if way.id == relationway.ref:
                        OuterList.append(
                            geometry.LineString([node.lon, node.lat]
                                                for node in way.nodes))
            else:
                for way in result_NodesWaysFromRels.ways:
                    if way.id == relationway.ref:
                        InnerList.append(
                            geometry.LineString([node.lon, node.lat]
                                                for node in way.nodes))

        OuterPoly = []
        # in case outer polygons are not fragmented, add those already in correct geometry
        # BUG FIX: iterate over a copy — removing from the list being
        # iterated used to skip the element following each removal.
        for outer in list(OuterList):
            if outer.is_closed:
                OuterPoly.append(
                    Polygon(outer.coords[0:(len(outer.coords) + 1)]))
                OuterList.remove(outer)

        initialLength = len(OuterList)
        i = 0
        OuterCoords = []

        # loop to account for more than one fragmented outer ring
        # (each pass consumes OuterList[0], so the loop always terminates;
        # the `i` guard is a never-incremented safety net)
        while (len(OuterList) > 0) & (i <= initialLength):
            OuterCoords.append(
                OuterList[0].coords[0:(len(OuterList[0].coords) + 1)])
            OuterList.remove(OuterList[0])
            for _ in range(0, len(OuterList)):
                # get all the other outer polygon pieces in the right order
                # (only works if fragments are in correct order, anyways!!
                # so added another loop around it in case not!)
                # BUG FIX: iterate over a copy here too (see above).
                for outer in list(OuterList):
                    if outer.coords[0] == OuterCoords[-1][-1]:
                        OuterCoords[-1] = OuterCoords[-1] + outer.coords[0:(
                            len(outer.coords) + 1)]
                        OuterList.remove(outer)

        for entry in OuterCoords:
            if len(entry) > 2:
                OuterPoly.append(Polygon(entry))

        PolyList = OuterPoly
        # get the inner polygons (usually in correct, closed shape - not accounting
        # for the fragmented case as in outer poly)
        for inner in InnerList:
            if inner.is_closed:
                PolyList.append(Polygon(inner))

        MultiPoly.append(MultiPolygon([shape(poly) for poly in PolyList]))
        multi_tags.append(relation.tags)

    schema_multi = {
        'geometry': 'MultiPolygon',
        'properties': {
            'Name': 'str:80',
            'Type': 'str:80',
            'Item': 'str:80'
        }
    }

    shapeout_multi = (save_path + '/' + str(item) + '_multi_' +
                      str(int(bbox[0])) + '_' + str(int(bbox[1])) + ".shp")

    with fiona.open(shapeout_multi,
                    'w',
                    crs=from_epsg(4326),
                    driver='ESRI Shapefile',
                    schema=schema_multi) as output:
        for i in range(0, len(MultiPoly)):
            # BUG FIX: previously read `relation.tags` — the loop variable
            # left over from the assembly loop above — so every feature was
            # written with the *last* relation's tags.
            prop1 = {
                'Name': multi_tags[i].get("name", "n/a"),
                'Type': multi_tags[i].get("type", "n/a"),
                'Item': item
            }
            geom = mapping(MultiPoly[i])
            output.write({'geometry': geom, 'properties': prop1})
    gdf_multi = geopandas.read_file(
        shapeout_multi)  # save_path + '/' + shapeout_multi)
    for ending in ['.shp', ".cpg", ".dbf", ".prj", '.shx']:
        os.remove(save_path + '/' + str(item) + '_multi_' + str(int(bbox[0])) +
                  '_' + str(int(bbox[1])) + ending)
    gdf_all = gdf_all.append(gdf_multi, sort=True)

    print('Combined all results for %s to one GeoDataFrame: done' % item)

    return gdf_all
Exemple #26
0
def _create_relation_geometry(relation_key, relation_val, footprints):
    """
    Create Shapely geometry for relations: Polygons with holes or MultiPolygons.

    OSM relations describe complex polygons — polygons with holes or
    multi-polygons — whose outer and inner rings may arrive as chains of
    LineStrings (see https://wiki.openstreetmap.org/wiki/Relation:multipolygon;
    rings carry an 'outer' or 'inner' role).  Relations without at least one
    closed outer ring are filtered out.  Inner rings tagged with the footprint
    type in their own right are already present in `footprints` and are not
    handled here.

    Parameters
    ----------
    relation_key : int
        the id of the relation to process
    relation_val : dict
        members and tags of the relation
    footprints : dict
        dictionary of all footprints (including open and closed ways)

    Returns
    -------
    Shapely Polygon or MultiPolygon (or None when nothing could be built)
    """
    # member geometries, split by role and by closed/open state
    outer_polys, outer_lines, inner_polys, inner_lines = _members_geom_lists(
        relation_val, footprints)

    # stitch any open outer ways into closed rings where possible
    if outer_lines:
        try:
            outer_polys += list(polygonize(outer_lines))
        except Exception:
            utils.log(
                f"polygonize failed for outer ways in relation: {relation_key}"
            )

    # likewise for open inner ways
    if inner_lines:
        try:
            inner_polys += list(polygonize(inner_lines))
        except Exception:
            utils.log(
                f"polygonize failed for inner ways in relation: {relation_key}"
            )

    # drop relations that lack usable rings, then punch the holes
    multipoly = []
    if not outer_polys and not inner_polys:
        utils.log(
            f"Relation {relation_key} missing outer and inner closed ways")
    elif not outer_polys:
        utils.log(f"Relation {relation_key} missing outer closed ways")
    else:
        for shell in outer_polys:
            shell = shell.buffer(0)  # fix invalid geometry if present
            punched = shell
            for ring in inner_polys:
                hole = ring.buffer(0)  # fix invalid geometry if present
                if hole.within(shell):
                    punched = punched.difference(hole)
            multipoly.append(punched)

    # one outer ring -> Polygon; several -> MultiPolygon; none -> log + None
    if len(multipoly) == 1:
        return multipoly[0]
    if multipoly:
        return MultiPolygon(multipoly)
    utils.log(
        f"relation {relation_key} could not be converted to a complex footprint"
    )
def write_shapefiles(out_dir,
                     block_size=500,
                     block_overlap=box_size,
                     max_count=np.inf,
                     filter_edges=True,
                     get_background=False):
    """Writes 3 shapefiles: CONTOURS.shp, BLOCK_LINES.shp, POINTS.shp, which
    respectively contain crop contours, block shapes and crop centroids.

    Also writes a pickle file (DATA.pickle) containing the output in
    dictionary form; this dictionary also contains the dictionary with all
    parameters used in the simulation under the key 'metadata'.  The input
    tif is divided into overlapping blocks of size
    block_size + 2 * block_overlap.  Duplicates in the overlap region are
    removed using KDTrees.  The parameter max_count is included for debug
    purposes; the process is terminated after max_count blocks.

    Note: the max_count default is np.inf — the same value as the np.infty
    alias used previously, which was removed in NumPy 2.0.
    """

    # field outline: all features of the clip shapefile, as one MultiPolygon
    field_shape = fiona.open(clp_path)
    field_polygons = []
    for feature in field_shape:
        poly = shape(feature['geometry'])
        field_polygons.append(poly)
    field = MultiPolygon(field_polygons)

    crop_dict, bg_dict = run_model(block_size,
                                   block_overlap,
                                   max_count=max_count,
                                   get_background=get_background)
    crop_dict = process_overlap(crop_dict, block_overlap)

    schema_lines = {'geometry': 'Polygon', 'properties': {'name': 'str'}}
    schema_pnt = {
        'geometry': 'Point',
        'properties': {
            'name': 'str',
            'confidence': 'float'
        }
    }
    schema_cnt = {
        'geometry': 'Polygon',
        'properties': {
            'name': 'str',
            'confidence': 'float'
        }
    }

    with fiona.collection(out_dir + 'CONTOURS.shp',
                          "w",
                          "ESRI Shapefile",
                          schema_cnt,
                          crs=from_epsg(4326)) as output_cnt:  # add projection
        with fiona.collection(out_dir + 'POINTS.shp',
                              "w",
                              "ESRI Shapefile",
                              schema_pnt,
                              crs=from_epsg(4326)) as output_pnt:
            with fiona.collection(out_dir + 'BLOCK_LINES.shp',
                                  "w",
                                  "ESRI Shapefile",
                                  schema_lines,
                                  crs=from_epsg(4326)) as output_lines:

                for (i, j) in crop_dict:
                    contours = crop_dict[(i, j)]['contours']
                    centroids = crop_dict[(i, j)]['centroids']
                    probs = crop_dict[(i, j)]['confidence']
                    (i_ad, j_ad, height, width) = crop_dict[(i, j)]['block']

                    count = 0
                    for (k, cnt) in enumerate(contours):  # write contours
                        # pixel coords -> world coords via the affine transform
                        xs, ys = cnt[:, 1] + j_ad, cnt[:, 0] + i_ad
                        centroid = (centroids[k, 0] + j_ad,
                                    centroids[k, 1] + i_ad)
                        transformed_contour = Polygon([
                            transform * (xs[l], ys[l]) for l in range(len(xs))
                        ])
                        transformed_centroid = Point(transform * centroid)
                        try:
                            if transformed_contour.difference(
                                    field
                            ).is_empty or not filter_edges:  # if contour is complete enclosed in field
                                output_cnt.write({
                                    'properties': {
                                        'name': '({},{}): {}'.format(i, j, k),
                                        'confidence': float(max(probs[k]))
                                    },
                                    'geometry':
                                    mapping(transformed_contour)
                                })
                                output_pnt.write({
                                    'properties': {
                                        'name': '({},{}): {}'.format(i, j, k),
                                        'confidence': float(max(probs[k]))
                                    },
                                    'geometry':
                                    mapping(transformed_centroid)
                                })
                                count += 1
                            else:
                                print('Crop ({},{}):{} intersects field edge'.
                                      format(i, j, k))
                        # BUG FIX: was a bare `except:`, which also swallowed
                        # KeyboardInterrupt/SystemExit; narrowed to Exception.
                        except Exception:
                            print('Contour ({},{}):{} invalid'.format(i, j, k))
                    print('{} crops written to block ({},{})'.format(
                        count, i, j))

                    # rectangle outlining the processed block
                    block_vertices = [(i_ad, j_ad), (i_ad + height, j_ad),
                                      (i_ad + height, j_ad + width),
                                      (i_ad, j_ad + width)]
                    transformed_vertices = [
                        transform * (a, b) for (b, a) in block_vertices
                    ]
                    output_lines.write({
                        'properties': {
                            'name': 'block ({},{})'.format(i, j)
                        },
                        'geometry':
                        mapping(Polygon(transformed_vertices))
                    })

    # record provenance alongside the results
    params['input_tif'] = img_path
    params['input_dem'] = dem_path
    params['input_clp'] = clp_path
    crop_dict['metadata'] = params

    with open(out_dir + 'DATA.pickle', 'wb') as file:
        pickle.dump(crop_dict, file)

    if get_background:
        with open(out_dir + 'BG_DATA.pickle', 'wb') as bg_file:
            pickle.dump(bg_dict, bg_file)

    print('\nFinished!')
 def deserialize_multi_polygon(self, gpbGeometry) -> MultiPolygon:
     """Deserialize every geometry of *gpbGeometry* into a polygon and wrap
     the results in a shapely MultiPolygon.
     """
     # BUG FIX: the comprehension previously collected the *bound method*
     # `self.deserialize_polygon` itself (it was never called), producing a
     # list of method objects instead of polygons.
     polygons = [
         self.deserialize_polygon(geometry)
         for geometry in gpbGeometry.geometries
     ]
     return MultiPolygon(polygons)
Exemple #29
0
def random_multipolygon(size):
    """Build a MultiPolygon out of *size* randomly generated polygons."""
    parts = [random_polygon(index) for index in range(size)]
    return MultiPolygon(parts)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
###############################################################################
# Dissolve five overlapping circular buffers into one geometry.
# NOTE(review): cascaded_union is deprecated and was removed in Shapely 2.0;
# unary_union is the modern replacement.  Kept as-is: this is rendered
# example code.
from shapely.geometry import Point
from shapely.ops import cascaded_union
polygons = [Point(i, 0).buffer(0.7) for i in range(5)]
cascaded_union(polygons)
###############################################################################
# The union merges the overlapping parts, so the MultiPolygon's raw area
# (sum of parts) differs from the unioned area.
from shapely.geometry import MultiPolygon
m = MultiPolygon(polygons)
m.area
cascaded_union(m).area