Example #1
    def _center_pts(pts):
        '''Fancy label position generator, using erosion to get label coordinate'''
        min = pts.min(0)
        pts -= min
        max = pts.max(0)
        pts /= max

        #probably don't need more than 20 points, reduce detail of the polys
        if len(pts) > 20:
            pts = pts[::len(pts)//20]

        try:
            poly = Polygon([tuple(p) for p in pts])
            for i in np.linspace(0,1,100):
                if poly.buffer(-i).is_empty:
                    return list(poly.buffer(-last_i).centroid.coords)[0] * max + min
                last_i = i

            print("unable to find zero centroid...")
            return list(poly.buffer(-100).centroid.coords)[0] * max + min
        except:
            # This may not be worth being so verbose about... I think this is only for label positions.
            import warnings
            warnings.warn("Shapely error - computing mean of points instead of geometric center")
            return np.nanmean(pts, 0)
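A minimal, self-contained sketch of the erosion trick used above: shrink the polygon with progressively larger negative buffers and take the centroid of the last non-empty result. The rectangle and step count are illustrative only; the original also rescales the points to the unit square first.

from shapely.geometry import Polygon
import numpy as np

def erode_label_point(poly, steps=100):
    # Keep shrinking until the polygon disappears, then label at the last centroid.
    last = poly
    for d in np.linspace(0, 1, steps):
        shrunk = poly.buffer(-d)
        if shrunk.is_empty:
            break
        last = shrunk
    return last.centroid.coords[0]

print(erode_label_point(Polygon([(0, 0), (4, 0), (4, 1), (0, 1)])))  # roughly (2.0, 0.5)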
Example #2
	def gerber2shapely(self):
		for gbr in self.gerber.figures:
			if(gbr.active < 1):
				continue
			if(gbr.type > 4):
				continue
			if(gbr.type == 0):	#Path or Polygon
				if gbr.fig_type == 0:	#Circle
					cap_s=1
				elif gbr.fig_type == 1:	#Rectangle
					cap_s=3
				else:
					cap_s=1
				if gbr.polygon:
					tmp_polygon = Polygon(gbr.points)
					self.tmp_figs.add(self.Figs(tmp_polygon.buffer(float(gbr.w)/2.0+self.tool_r, cap_style=cap_s)))
					raw_polygon = tmp_polygon.buffer(float(gbr.w)/2.0, cap_style=cap_s)
					self.raw_figs.add(self.Figs(raw_polygon))
					if raw_polygon.interiors:
						for interior in raw_polygon.interiors:
							self.raw_figs.add(self.Figs(Polygon(interior)))
				else:
					if self.zone_segment:
						if LineString(gbr.points).is_simple:
							self.lines.append(self.Line(gbr.points,fig_type = cap_s,r = float(gbr.w)/2.0+self.tool_r))
						else:
							self.tmp_figs.add(self.Figs(Polygon(gbr.points).buffer(float(gbr.w)/2.0+self.tool_r, cap_style=cap_s)))
					else:
						self.tmp_figs.add(self.Figs(LineString(gbr.points).buffer(float(gbr.w)/2.0+self.tool_r, cap_style=cap_s)))
					#
					self.raw_figs.add(self.Figs(LineString(gbr.points).buffer(float(gbr.w)/2.0, cap_style=cap_s)))
			elif(gbr.type == 1):
				#Circle
				self.tmp_figs.add(self.Figs(Point(gbr.cx,gbr.cy).buffer(float(gbr.r)+self.tool_r,resolution=self.circle_ang)))
				self.raw_figs.add(self.Figs(Point(gbr.cx,gbr.cy).buffer(float(gbr.r),resolution=self.circle_ang)))
			elif(gbr.type == 2):
				#Rectangle
				points = [(gbr.x1-self.tool_r,gbr.y1-self.tool_r),(gbr.x1-self.tool_r,gbr.y2+self.tool_r),
					(gbr.x2+self.tool_r,gbr.y2+self.tool_r),(gbr.x2+self.tool_r,gbr.y1-self.tool_r),
					(gbr.x1-self.tool_r,gbr.y1-self.tool_r)]
				self.tmp_figs.add(self.Figs(Polygon(points)))
				points = [(gbr.x1,gbr.y1),(gbr.x1,gbr.y2),(gbr.x2,gbr.y2),(gbr.x2,gbr.y1),(gbr.x1,gbr.y1)]
				self.raw_figs.add(self.Figs(Polygon(points)))
			elif(gbr.type == 3):
				#Oval
				if gbr.h <= gbr.w:
					tmp_r = gbr.h/2.0+self.tool_r
					shift_x = (gbr.w-gbr.h)/2.0
					self.tmp_figs.add(self.Figs(LineString([(gbr.cx-shift_x,gbr.cy),(gbr.cx+shift_x,gbr.cy)]).buffer(tmp_r)))
					self.raw_figs.add(self.Figs(LineString([(gbr.cx-shift_x,gbr.cy),(gbr.cx+shift_x,gbr.cy)]).buffer(gbr.h/2.0)))
				else:
					tmp_r = gbr.w/2.0+self.tool_r
					shift_y = (gbr.h-gbr.w)/2.0
					self.tmp_figs.add(self.Figs(LineString([(gbr.cx,gbr.cy-shift_y),(gbr.cx,gbr.cy+shift_y)]).buffer(tmp_r)))
					self.raw_figs.add(self.Figs(LineString([(gbr.cx,gbr.cy-shift_y),(gbr.cx,gbr.cy+shift_y)]).buffer(gbr.w/2.0)))
			elif(gbr.type == 4):
				#Polygon
				self.tmp_figs.add(self.Figs(Point(gbr.cx,gbr.cy).buffer(float(gbr.r)+self.tool_r,resolution=gbr.sides)))
				self.raw_figs.add(self.Figs(Point(gbr.cx,gbr.cy).buffer(float(gbr.r),resolution=gbr.sides)))
Example #3
    def _center_pts(pts):
        '''Fancy label position generator, using erosion to get label coordinate'''
        min = pts.min(0)
        pts -= min
        max = pts.max(0)
        pts /= max

        poly = Polygon([tuple(p) for p in pts])
        for i in np.linspace(0,1,100):
            if poly.buffer(-i).is_empty:
                return list(poly.buffer(-last_i).centroid.coords)[0] * max + min
            last_i = i

        print("unable to find zero centroid...")
        return list(poly.buffer(-100).centroid.coords)[0] * max + min
    def test_polygon_topojson(self):
        '''
        Create a polygon to cover the world and make sure it is "similar" (clip on)
        '''
        
        self.defineGeometry('POLYGON')

        geom = Polygon( [(-180, -85.0511),
                         ( 180, -85.0511),
                         ( 180, 85.0511), 
                         (-180, 85.0511), 
                         (-180, -85.0511)])

        self.insertTestRow(geom.wkt)
        
        tile_mimetype, tile_content = utils.request(self.config_file_content, "vectile_test", "topojson", 0, 0, 0)
        self.assertTrue(tile_mimetype.endswith('/json'))
        topojson_result = json.loads(tile_content)
        topojson_xform = get_topo_transform(topojson_result)
        
        parts = [topojson_result['arcs'][arc[0]] for arc in topojson_result['objects']['vectile']['geometries'][0]['arcs']]
        parts = [map(topojson_xform, topojson_dediff(part)) for part in parts]
        
        result_geom = Polygon(*parts)
        expected_geom = Polygon( [(-180, -85.0511), (180, -85.0511), (180, 85.0511), (-180, 85.0511), (-180, -85.0511)])

        # What is going on here is a bit unorthodox, but let me explain. The clipping
        # code inside TileStache relies on GEOS Intersection alongside some TileStache code
        # that creates a clipping geometry based on the tile perimeter. The tile perimeter
        # is made out of 17 (x,y) coordinates and not a box. Hence, the GEOS::Intersection
        # of that perimeter with the geometry of the vector we get back from the data provider
        # can end up with extra vertices. Although it is the right shape, we cannot do a straight
        # comparison because the expected geometry and the returned geometry *may* have extra
        # vertices. Simplify() will not do much because the distance of the vertices can clearly
        # be bigger than the tolerance. 
        #
        # To add to this, because of double precision, the vertices may not be exact.
        # An optional way to find out if two shapes are close enough, is to buffer the two features
        # by just a little bit and then subtract each other like so:
        #
        #             geometry1.difference(geometry2) == empty set?
        #             geometry2.difference(geometry1) == empty set?
        # 
        # If both differences are empty, then the geometries are similar. Hence what you see below
        
        # Close enough?
        self.assertTrue(result_geom.difference(expected_geom.buffer(1)).is_empty)
        self.assertTrue(expected_geom.difference(result_geom.buffer(1)).is_empty)
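The buffer-and-difference check described in the comment can be wrapped in a small helper; the tolerance is whatever slack the test is willing to accept (the assertions above use 1 unit). A sketch:

from shapely.geometry import Polygon

def geoms_similar(a, b, tol=1.0):
    # Each geometry must vanish when the slightly buffered other is subtracted from it.
    return (a.difference(b.buffer(tol)).is_empty and
            b.difference(a.buffer(tol)).is_empty)

# Two squares that differ only by an extra collinear vertex compare as similar.
sq1 = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
sq2 = Polygon([(0, 0), (5, 0), (10, 0), (10, 10), (0, 10)])
print(geoms_similar(sq1, sq2, tol=0.01))  # True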
Example #5
class PaintConnectTest3(PaintTestCase):
    """
    Tests with LinearRings among the elements.
    """

    def setUp(self):
        self.boundary = Polygon([[0, 0], [0, 5], [5, 5], [5, 0]])
        print "TEST w/ LinearRings"

    def test_jump2(self):
        print "Test: WALK Expected"
        paths = [
            LineString([[0.5, 2], [2, 4.5]]),
            LineString([[2, 0.5], [4.5, 2]]),
            self.boundary.buffer(-0.5).exterior
        ]
        for p in paths:
            print(p)

        tooldia = 1.0

        print "--"
        result = Geometry.paint_connect(mkstorage(deepcopy(paths)), self.boundary, tooldia)

        result = list(result.get_objects())
        for r in result:
            print(r)

        self.assertEqual(len(result), 1)
Example #6
class Perimeter(object):
	def __init__(self, perimeter_points, buffer_size=10):
		self.outer_polygon = Polygon(perimeter_points)
		self.buffer_size = buffer_size
		self.inner_polygon = self.outer_polygon.buffer(-1 * buffer_size)

	def get_bounds(self):
		return self.outer_polygon.bounds

	def get_repulsive_vel(self, xy):
		point = Point(xy)
		containment_field = self.inner_polygon

		if containment_field.contains(point):
			return NULL_VECTOR

		vector_from_center = np.asarray([point.x - containment_field.centroid.x, point.y - containment_field.centroid.y])
		vector_to_center = -1 * vector_from_center

		line_to_center = LineString(((containment_field.centroid.x, containment_field.centroid.y), (point.x, point.y)))
		line_center_field = containment_field.intersection(line_to_center)
		distance_to_field = line_to_center.length - line_center_field.length

		repulsion_scale = distance_to_field / self.buffer_size

		return _normalized(vector_to_center) * repulsion_scale

	def get_perimeter_coordinates(self):
		return self.outer_polygon.exterior.coords

	def get_field_coordinates(self):
		return self.inner_polygon.exterior.coords
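A usage sketch for the class above. NULL_VECTOR and _normalized are module-level helpers the source assumes, so stand-ins are defined here purely for illustration.

import numpy as np
from shapely.geometry import Polygon, Point, LineString

NULL_VECTOR = np.zeros(2)  # stand-in for the module-level constant the class expects

def _normalized(v):  # stand-in for the module-level helper the class expects
    return v / np.linalg.norm(v)

fence = Perimeter([(0, 0), (100, 0), (100, 100), (0, 100)], buffer_size=10)
print(fence.get_repulsive_vel((50, 50)))  # deep inside the inner field -> [0. 0.]
print(fence.get_repulsive_vel((99, 50)))  # near the wall -> pushed back toward the centre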
Example #7
def intersectNodes(path, srs, projName, projSRS, isGridProject, gridResolution):
    j = []
    isGridLine = False
    sf = shapefile.Reader(path)
    nodes = list(getShapelyNodes(projName))
    for shape in sf.shapes():
        shType = shape.shapeType
        # http://en.wikipedia.org/wiki/Shapefile#Shapefile_shape_format_.28.shp.29
        if shType == 5: # Polygon
            sh = Polygon(shape.points)
        elif shType == 3: # Line
            if isGridProject:
                sh = LineString(shape.points)
                isGridLine = True
            else:
                continue
        else:
            consoleAppend('Unknown shape type %s. Continue without access' % shType)
            continue

        if srs != projSRS:
            sh = shapelyReproject(sh, srs, projSRS)

        if isGridLine:
            sh = sh.buffer(gridResolution)

        for node in nodes:
            if sh.contains(node[1]): # node.geom
                j.append(node[0]) # node.node_id

    return j if j else None
Example #8
def build_polygon(refs):
    coords = []

    for ref in refs:
        coord = coordsDB.get(str(ref))
        if coord:
            coord = [float(c) for c in coord.split(',')]
            coords.append(coord)
        else:
            # for some reason coordinates are missing
            # this is usually because an extract cuts coordinates out
            return False

    if len(coords) > 2:
        # 3 point minimum for polygon
        # avoids common osm problems
        polygon = Polygon(coords)

        if polygon.is_valid:
            return polygon
        else:
            # 0.0 buffer cleans invalid polygons
            # they're invalid for many reasons, mostly people-introduced problems
            return polygon.buffer(0.0)
    else:
        return False
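For illustration, here is the buffer(0.0) cleanup used above applied to a classic self-intersecting "bowtie" ring; this is a sketch of the trick, not part of the original code.

from shapely.geometry import Polygon

bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2), (0, 0)])
print(bowtie.is_valid)       # False: the ring crosses itself at (1, 1)
repaired = bowtie.buffer(0.0)
print(repaired.is_valid)     # True
# Depending on ring orientation, GEOS returns either both lobes (a MultiPolygon)
# or just one of the two triangles.
print(repaired.geom_type)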
Example #9
    def test_attribute_chains(self):

        # Attribute Chaining
        # See also ticket #151.
        p = Polygon(((0.0, 0.0), (0.0, 1.0), (-1.0, 1.0), (-1.0, 0.0)))
        self.assertEqual(
            list(p.boundary.coords),
            [(0.0, 0.0), (0.0, 1.0), (-1.0, 1.0), (-1.0, 0.0), (0.0, 0.0)])

        ec = list(Point(0.0, 0.0).buffer(1.0, 1).exterior.coords)
        self.assertIsInstance(ec, list)  # TODO: this is a poor test

        # Test chained access to interiors
        p = Polygon(
            ((0.0, 0.0), (0.0, 1.0), (-1.0, 1.0), (-1.0, 0.0)),
            [((-0.25, 0.25), (-0.25, 0.75), (-0.75, 0.75), (-0.75, 0.25))]
        )
        self.assertEqual(p.area, 0.75)

        """Not so much testing the exact values here, which are the
        responsibility of the geometry engine (GEOS), but that we can get
        chain functions and properties using anonymous references.
        """
        self.assertEqual(
            list(p.interiors[0].coords),
            [(-0.25, 0.25), (-0.25, 0.75), (-0.75, 0.75), (-0.75, 0.25),
             (-0.25, 0.25)])
        xy = list(p.interiors[0].buffer(1).exterior.coords)[0]
        self.assertEqual(len(xy), 2)

        # Test multiple operators, boundary of a buffer
        ec = list(p.buffer(1).boundary.coords)
        self.assertIsInstance(ec, list)  # TODO: this is a poor test
Example #10
def fix_geometry(geometry):
    """Attempts to fix an invalid geometry (from https://goo.gl/nfivMh)"""
    try:
        return geometry.buffer(0)
    except ValueError:
        pass

    polygons = geom_as_list(geometry)

    fixed_polygons = list()
    for i, polygon in enumerate(polygons):
        if not linear_ring_is_valid(polygon.exterior):
            continue

        interiors = []
        for ring in polygon.interiors:
            if linear_ring_is_valid(ring):
                interiors.append(ring)

        fixed_polygon = Polygon(polygon.exterior, interiors)

        try:
            fixed_polygon = fixed_polygon.buffer(0)
        except ValueError:
            continue

        fixed_polygons.extend(geom_as_list(fixed_polygon))

    if len(fixed_polygons) > 0:
        return MultiPolygon(fixed_polygons)
    else:
        return None
Example #11
def polybuff(tum,minus=False):
    if minus==True:
        offs=-offset
    else:
        offs=offset
    tum=Polygon(tum)
    tum=tum.buffer(offs)
    return np.array(tum.exterior.coords)
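A hedged usage sketch for polybuff above: it relies on a module-level offset, so a value is assumed here purely for illustration. Positive buffering dilates the outline, minus=True erodes it.

import numpy as np
from shapely.geometry import Polygon

offset = 0.1  # assumed module-level value read by polybuff()

square = np.array([(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)])
grown = polybuff(square)                # outline dilated by offset
shrunk = polybuff(square, minus=True)   # outline eroded by offset
print(Polygon(grown).area > 1.0 > Polygon(shrunk).area)  # True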
Example #12
    def _center_pts(pts):
        '''Fancy label position generator, using erosion to get label coordinate'''
        min = pts.min(0)
        pts -= min
        max = pts.max(0)
        pts /= max

        #probably don't need more than 20 points, reduce detail of the polys
        if len(pts) > 20:
            pts = pts[::len(pts)//20]

        poly = Polygon([tuple(p) for p in pts])
        for i in np.linspace(0,1,100):
            if poly.buffer(-i).is_empty:
                return list(poly.buffer(-last_i).centroid.coords)[0] * max + min
            last_i = i

        print("unable to find zero centroid...")
        return list(poly.buffer(-100).centroid.coords)[0] * max + min
Example #13
def shape_to_polygons(shape):
    def inside(pt):
        return hypot(*pt) <= R
    def adjust(pt):
        x, y = pt
        a = atan2(y, x)
        x = cos(a) * R
        y = sin(a) * R
        return (x, y)
    result = []
    parts = list(shape.parts) + [len(shape.points)]
    for i1, i2 in zip(parts, parts[1:]):
        points = [tuple(pt) for pt in shape.points[i1:i2]]
        points = [laea(pt) for pt in points]
        points = [pt for pt in points if pt]
        p = Polygon(points)
        p = p.buffer(-0.01)
        p = p.buffer(0.01)
        p = p.simplify()
        if p.is_empty:
            continue
        if isinstance(p, Polygon):
            ps = [p]
        else:
            ps = p.geoms
        for p in ps:
            points = list(p.exterior.coords)
            print(points)
            for a, b in zip(points, points[1:]):
                # if not a[-1] and not b[-1]:
                #     continue
                a = a[:2]
                b = b[:2]
                in1 = inside(a)
                in2 = inside(b)
                if not in1 and not in2:
                    continue
                if in1 and not in2:
                    b = adjust(b)
                if in2 and not in1:
                    a = adjust(a)
                result.append(LineString([a, b]))
    return result
Example #14
def _crosses_antimeridian(region: Polygon) -> bool:
    """
    Determine if the given region crosses the Antimeridian line, by converting
    the given Polygon from -180;180 to 0;360 and checking if the antimeridian
    line crosses it.

    This only works with Polygons without holes

    :param region: Polygon to test
    """

    # Convert region to only have positive longitudes.
    # This way we can perform a simple antimeridian check

    old_exterior = region.exterior.coords
    new_exterior = []
    for point in old_exterior:
        lon, lat = point[0], point[1]
        if -180. <= lon < 0.:
            lon += 360.
        new_exterior.append((lon, lat))
    converted_region = Polygon(new_exterior)

    # There's a problem at this point. Any polygon crossed by the zero-th
    # meridian can in principle convert to an inverted polygon that is crossed
    # by the antimeridian.

    if not converted_region.is_valid:
        # The polygon 'became' invalid upon conversion => probably the original
        # polygon is what we want

        # noinspection PyBroadException
        try:
            # First try cleaning up geometry that is invalid
            converted_region = converted_region.buffer(0)
        except BaseException:
            pass
        if not converted_region.is_valid:
            return False

    test_line = LineString([(180, -90), (180, 90)])
    if test_line.crosses(converted_region):
        # The converted polygon seems to be valid and crossed by the
        # antimeridian. At this point there's no 'perfect' way how to tell if
        # we wanted the converted polygon or the original one.

        # A simple heuristic is to check for size. The smaller one is quite
        # likely the desired one
        if converted_region.area < region.area:
            return True
        else:
            return False
    else:
        return False
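Two illustrative calls to the function above (the coordinates are made up): a region straddling the antimeridian and one that merely spans the prime meridian.

from shapely.geometry import Polygon

fiji_like = Polygon([(175, -20), (-175, -20), (-175, -15), (175, -15)])
europe_like = Polygon([(-10, 40), (10, 40), (10, 50), (-10, 50)])

print(_crosses_antimeridian(fiji_like))    # True
print(_crosses_antimeridian(europe_like))  # False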
Example #15
def cluster_to_venues(indices, vloc, kdtree, n_steps=5):
    # Given a cluster (ie a set of venues indices), it should return
    # neighborhoods (ie compact/complete sets of venues indices) that will be
    # evaluated by EMD.
    # Given how DBSCAN works, most of these clusters look rather convex, so
    # convex hull could be a good option. Otherwise, I could use CGAL binding
    # to get alpha shapes. Then I can consider bounding box (called envelope
    # by Shapely) or circle. Finally, some dilation and erosion of the
    # previous shapes.
    # I can also add or remove individual points (but it's unclear which one,
    # see notebook) while maintaining more or less neighborhood property.

    # Get initial polygon
    points = vloc[indices, :]
    try:
        hull = points[ConvexHull(points).vertices, :]
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        print(indices)
        return []
    poly = Polygon(hull)
    center = np.array(poly.centroid.coords)

    # Query neighboring venues
    radius = np.max(cdist(np.array(poly.exterior.coords), center))
    cd_idx = kdtree.query_ball_point(center, 2.0*radius)[0]

    # Build increasing regions
    inc = 1.0*radius/n_steps
    extensions = [poly]
    extensions += [poly.buffer(i*inc,
                               resolution=2).convex_hull.simplify(30, False)
                   for i in range(1, n_steps+1)]

    # Get venues inside them
    remaining = set(cd_idx)
    inside = set([])
    res_cluster = []
    for region in extensions:
        if region.exterior is None:
            continue
        cpoly = np.array(region.exterior.coords)
        inside_this = set([idx for idx in remaining
                           if point_inside_poly(cpoly, vloc[idx, :])])
        remaining.difference_update(inside_this)
        inside.update(inside_this)
        res_cluster.append(list(inside))
    return res_cluster
Example #16
def SpatialToplogy(Spatial_A, Spatial_B):
            if Spatial_A[4] == 'Point' and Spatial_B[4]== 'Point':
              Point_0=Point(Spatial_A[0],Spatial_A[1])
              Point_1=Point(Spatial_B[0],Spatial_B[1])
              #Point to point relationships
              if Point_0.equals(Point_1): return 'Point1 equals Point2'
              if Point_0.within(Point_1.buffer(2)): return 'Point1 lies within a buffer of 2 m from Point2'
              if Point_0.overlaps(Point_1): return 'Point1 overlaps Point2'
              #if Point_0.disjoint(Point_1): return 'Point1 disjoint Point2'
              
              #Point to line relationships
            if Spatial_A[4] == 'Point' and Spatial_B[4]== 'Line':
                 Point_0=Point(Spatial_A[0],Spatial_A[1])
                 Line_0=LineString([(Spatial_B[0],Spatial_B[1]),(Spatial_B[2],Spatial_B[3])])
                 if Point_0.touches(Line_0):return 'Point1 touches Line1'
                 if Point_0.within(Line_0.buffer(2)):return 'Point1 lies within a buffer of 2 m from L1'
              #Point to polygon relationships
            if Spatial_A[4] == 'Point' and Spatial_B[4]== 'Polygon':
                 Point_0=Point(Spatial_A[0],Spatial_A[1])
                 Polygon_0=Polygon([(Spatial_B[0],Spatial_B[1]),(Spatial_B[2],Spatial_B[1]),(Spatial_B[2],Spatial_B[3]),(Spatial_B[0],Spatial_B[3])])
                 if Point_0.touches(Polygon_0):return 'Point1 touches Polygon1'
                 if Point_0.within(Polygon_0):return'Point1 lies within Polygon1'
                 if Point_0.overlaps(Polygon_0):return 'Point1 lies overlaps Polygon1'
             #Line to line relationships
            if Spatial_A[4]=='Line' and Spatial_B[4]=='Line':
                 Line_0=LineString([(Spatial_A[0],Spatial_A[1]),(Spatial_A[2],Spatial_A[3])])
                 Line_1=LineString([(Spatial_B[0],Spatial_B[1]),(Spatial_B[2],Spatial_B[3])])
                 if Line_0.equals(Line_1):return 'Line0 equals Line1'
                 if Line_0.touches(Line_1):return 'Line0 touches Line1'
                 if Line_0.crosses(Line_1):return 'Line0 crosses Line1'
                 if Line_0.within(Line_1.buffer(2)):return 'Line0 lies within a buffer of 2 m Line1'
                 if Line_0.overlaps(Line_1):return 'Line0 overlaps Line1'
              #Line to polygon relationships  
            if Spatial_A[4]=='Line' and Spatial_B[4]=='Polygon':
                 Line_0=LineString([(Spatial_A[0],Spatial_A[1]),(Spatial_A[2],Spatial_A[3])])
                 Polygon_0=Polygon([(Spatial_B[0],Spatial_B[1]),(Spatial_B[2],Spatial_B[1]),(Spatial_B[2],Spatial_B[3]),(Spatial_B[0],Spatial_B[3])])
                 if Line_0.touches(Polygon_0):return 'Line0 touches Polygon1'
                 if Line_0.crosses(Polygon_0):return 'Line0 crosses Polygon1'
                 if Line_0.within(Polygon_0):return 'Line0 lies within Polygon1'
              #Polygon to Polygon relationships
            if Spatial_A[4]=='Polygon' and Spatial_B[4]=='Polygon':
                 Polygon_0=Polygon([(Spatial_A[0],Spatial_A[1]),(Spatial_A[2],Spatial_A[1]),(Spatial_A[2],Spatial_A[3]),(Spatial_A[0],Spatial_A[3])])
                 Polygon_1=Polygon([(Spatial_B[0],Spatial_B[1]),(Spatial_B[2],Spatial_B[1]),(Spatial_B[2],Spatial_B[3]),(Spatial_B[0],Spatial_B[3])])
                 if Polygon_0.touches(Polygon_1):return 'Polygon touches Polygon1'
                 if Polygon_0.equals(Polygon_1):return 'Polygon0 equals Polygon1'
                 if Polygon_0.within(Polygon_1):return 'Polygon lies within Polygon1'
                 if Polygon_0.within(Polygon_1.buffer(2)):return 'Polygon lies within a buffer of 2m  Polygon1'
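A hypothetical call, assuming the function is defined at module level and that each record is laid out as [x1, y1, x2, y2, kind], with only the leading coordinate pair used for a Point:

a = [1.0, 1.0, 0.0, 0.0, 'Point']
b = [0.0, 0.0, 5.0, 5.0, 'Polygon']
print(SpatialToplogy(a, b))  # 'Point1 lies within Polygon1'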
Example #17
def expand_polygon(polyArr, offset):
    """Expand a polygon using the Shapely buffer function"""

    try:
        # Convert the polygon vertex array to a Shapely polygon object
        polyShape = Polyshape(polyArr)

        # Offset the polygon outward by the given amount
        polyShapeNew = polyShape.buffer(offset)

        # Convert the buffered polygon back to a numpy array
        outPolyArr = np.array(polyShapeNew.exterior.xy).transpose()

        return outPolyArr
    
    except Exception:
        
        return np.array([])
Example #18
    def from_coords(self, x, y, name='None', fid=0):
        """Create ``Grain`` from coordinate arrays

        Example:
          >>> g=Grain.from_coords([0,0,2,2],[0,1,1,0])
          >>> g.xy
          array([[ 0.,  0.,  2.,  2.,  0.],
                 [ 0.,  1.,  1.,  0.,  0.]])

        """
        geom = Polygon([(xx, yy) for xx, yy in zip(x, y)])
        # try  to "clean" self-touching or self-crossing polygons
        if not geom.is_valid:
            geom = geom.buffer(0)
        if geom.is_valid and geom.geom_type == 'Polygon':
            return self(geom, name, fid)
        else:
            print('Invalid geometry.')
Example #19
    def test_polygon_pbf(self):
        '''
        Create a polygon to cover the world and make sure it is "similar" (clip on) (pbf)
        '''
        self.defineGeometry('POLYGON')

        geom = Polygon([(-180, -85.05),
                        (180, -85.05),
                        (180, 85.05),
                        (-180, 85.05),
                        (-180, -85.05)])

        self.insertTestRow(geom.wkt)

        tile_mimetype, tile_content = utils.request(self.config_file_content, "vectile_test", "pbf", 0, 0, 0)
        self.assertTrue(tile_mimetype.endswith('/x-protobuf'))
        pbf_result = mapbox_vector_tile.decode(tile_content)
        layer_result = pbf_result['vectile_test']

        extent = tile_bounds_mercator(0, 0, 0)

        result_geom = decoded_pbf_asshape(layer_result['features'][0], extent)
        expected_geom = Polygon([(-180, -85.05), (180, -85.05), (180, 85.05), (-180, 85.05), (-180, -85.05)])

        # What is going on here is a bit unorthodox, but let me explain. The clipping
        # code inside TileStache relies on GEOS Intersection alongside some TileStache code
        # that creates a clipping geometry based on the tile perimeter. The tile perimeter
        # is made out of 17 (x,y) coordinates and not a box. Hence, the GEOS::Intersection
        # of that perimeter with the geometry of the vector we get back from the data provider
        # can end up with extra vertices. Although it is the right shape, we cannot do a straight
        # comparison because the expected geometry and the returned geometry *may* have extra
        # vertices. Simplify() will not do much because the distance of the vertices can clearly
        # be bigger than the tolerance. 
        #
        # To add to this, because of double precision, the vertices may not be exact.
        # An optional way to find out if two shapes are close enough, is to buffer the two features
        # by just a little bit and then subtract each other like so:
        #
        #             geometry1.difference(geometry2) == empty set?
        #             geometry2.difference(geometry1) == empty set?
        # 
        # If both differences are empty, then the geometries are similar. Hence what you see below
        self.assertTrue(result_geom.difference(expected_geom.buffer(0.01)).is_empty)
        self.assertTrue(expected_geom.difference(result_geom.buffer(0.01)).is_empty)
Example #20
def _polygon_from_coords(coords, fix_geom=False):
    """
    Return Shapely Polygon from coordinates.

    - coords: list of alternating latitude / longitude coordinates
    - fix_geom: automatically fix geometry
    """
    number_of_points = len(coords) // 2
    coords_as_array = np.array(coords)
    reshaped = coords_as_array.reshape(number_of_points, 2)
    points = [(float(i[1]), float(i[0])) for i in reshaped.tolist()]
    polygon = Polygon(points)
    try:
        assert polygon.is_valid
        return polygon
    except AssertionError:
        if fix_geom:
            return polygon.buffer(0)
        else:
            raise RuntimeError("Geometry is not valid.")
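A hypothetical call to the helper above: six numbers are three latitude/longitude pairs, and Shapely closes the ring itself.

# lat/lon alternating, as the docstring describes
triangle = _polygon_from_coords([0.0, 0.0, 0.0, 10.0, 10.0, 10.0])
print(triangle.wkt)  # POLYGON ((0 0, 10 0, 10 10, 0 0))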
Example #21
def WayToPoly(wayId, ways, nodes):
	wayData = ways[wayId]
	wayNodes = wayData[0]
	if wayNodes[0] == wayNodes[-1]:
		#Close polygon
		tags = wayData[1]
		pts = []
		for nid in wayNodes:
			if int(nid) not in nodes:
				print "Warning: missing node", nid
				continue
			pts.append(nodes[int(nid)][0])

		#Require at least 3 points
		if len(pts) < 3:
			return None

		poly = Polygon(pts)
		if not poly.is_valid:
			print "Warning: polygon is not valid"
			print explain_validity(poly)
			poly = poly.buffer(0)
		return poly
	else:
		#Unclosed way
		tags = wayData[1]
		pts = []
		for nid in wayNodes:
			if int(nid) not in nodes:
				print "Warning: missing node", nid
				continue
			pts.append(nodes[int(nid)][0])		

		line = LineString(pts)
		if not line.is_valid:
			print "Warning: polygon is not valid"
			print explain_validity(line)
			line = line.buffer(0)
		return line

	return None	
Example #22
def reduce_polygons(polygonData, hullAreaThreshold, bufferDistance,
                   bufferResolution, numThreshold, areaThreshold,
                   simplifyThreshold):
    polygons = []
    for p in polygonData:
        polygon = Polygon([(pt["x"], pt["y"]) for pt in p["points"]])

        # For very small regions, use a convex hull
        if polygon.area < hullAreaThreshold:
            polygon = polygon.convex_hull
        # Also buffer by a small distance to aid the cascaded union
        polygon = polygon.buffer(bufferDistance, bufferResolution)

        polygons.append(polygon)

    # Try to merge some polygons
    polygons = cascaded_union(polygons)

    # Normalize the Polygon or MultiPolygon into an array
    if "exterior" in dir(polygons):
        polygons = [polygons]
    else:
        polygons = [p for p in polygons]

    region = []
    # Sort from largest to smallest to facilitate dropping of small regions
    polygons.sort(key=lambda x:-x.area)
    for p in polygons:
        # Try to include regions that are big enough, once we have a
        # few representative regions
        if len(region) > numThreshold and p.area < areaThreshold:
            break

        p = p.simplify(simplifyThreshold)
        region.append({
            "points": [{"x": c[0], "y": c[1]} for c in p.exterior.coords]
        })

    return region
Example #23
def GetInnerPolygons(geo, cornerx, cornery):
	innerPoly = []
	for feature in geo['features'][1:]:
		#print feature['type']
		geom = feature['geometry']
		movedPoly = []		
		for pt in geom['coordinates'][0]:
			movedPoly.append((pt[0]+cornerx, pt[1]+cornery))

		innerPoly.append(movedPoly)

	print "Inner Polys", len(innerPoly), cornerx, cornery

	innerPolyShps = []
	for poly in innerPoly:
		newPoly = Polygon(poly)
		if not newPoly.is_valid:
			print "Warning: invalid polygon (2)"
			print explain_validity(newPoly)
			newPoly = newPoly.buffer(0)
		innerPolyShps.append(newPoly)
	return innerPolyShps
Example #24
def GetOuterPolygons(geo, cornerx, cornery):
	outerPoly = []
	assert geo['type'] == "FeatureCollection"
	feature = geo['features'][0]
	geom = feature['geometry']
	for poly in geom['coordinates'][1:]:
		movedPoly = []		
		for pt in poly:
			movedPoly.append((pt[0]+cornerx, pt[1]+cornery))
		outerPoly.append(movedPoly)

	print "Outer Polys", len(outerPoly), cornerx, cornery

	outerPolyShps = []
	for poly in outerPoly:
		newPoly = Polygon(poly)
		if not newPoly.is_valid:
			print "Warning: invalid polygon (1)"
			print explain_validity(newPoly)
			newPoly = newPoly.buffer(0)
		outerPolyShps.append(newPoly)
	return outerPolyShps
Example #25
class City:
    
    def __init__(self, name, rank, zoom, population, geonameid, location, position, font):
        self.name = name
        self.rank = rank
        self.zoom = zoom
        self.population = population
        self.geonameid = geonameid
        self.location = location
        self.position = position

        self.placement = NE
        self.radius = 4
        self.buffer = 2
        
        x1, y1 = position.x - self.radius, position.y - self.radius
        x2, y2 = position.x + self.radius, position.y + self.radius
        
        self._point_shape = Polygon(((x1, y1), (x1, y2), (x2, y2), (x2, y1), (x1, y1)))
        self._label_shape = None

        self._width, self._height = font.getsize(self.name)
        self._update_label_shape()

    def __repr__(self):
        return '<City: %s>' % self.name
    
    def __hash__(self):
        return id(self)

    def __cmp__(self, other):
        return compare_places(self, other)

    def __unicode__(self):
        return unicode(self.name)
    
    def _update_label_shape(self):
        """
        """
        x, y = self.position.x, self.position.y
        
        if self.placement in (NE, ENE, ESE, SE):
            x += self.radius + self._width/2
        
        if self.placement in (NW, WNW, WSW, SW):
            x -= self.radius + self._width/2

        if self.placement in (NW, NE):
            y -= self._height/2

        if self.placement in (SW, SE):
            y += self._height/2

        if self.placement in (ENE, WNW):
            y -= self._height/6

        if self.placement in (ESE, WSW):
            y += self._height/6
        
        if self.placement in (NNE, SSE, NNW):
            _x = self.radius * cos(pi/4) + self._width/2
            _y = self.radius * sin(pi/4) + self._height/2
            
            if self.placement in (NNE, SSE):
                x += _x
            else:
                x -= _x
            
            if self.placement in (SSE, ):
                y += _y
            else:
                y -= _y
        
        if self.placement == N:
            y -= self.radius + self._height / 2
        
        if self.placement == S:
            y += self.radius + self._height / 2
        
        x1, y1 = x - self._width/2, y - self._height/2
        x2, y2 = x + self._width/2, y + self._height/2
        
        self._label_shape = Polygon(((x1, y1), (x1, y2), (x2, y2), (x2, y1), (x1, y1)))
    
    def label_bbox(self):
        return self._label_shape.envelope
    
    def mask_shape(self):
        return self._label_shape.buffer(self.buffer).envelope.union(self._point_shape)
    
    def move(self):
        self.placement = choice(placements.keys())
        self._update_label_shape()
    
    def placement_energy(self):
        return placements[self.placement]
    
    def overlap_energy(self, other):
        if self.overlaps(other):
            return min(10.0 / self.rank, 10.0 / other.rank)

        return 0.0
    
    def overlaps(self, other, reflexive=True):
        overlaps = self.mask_shape().intersects(other.label_bbox())
        
        if reflexive:
            overlaps |= other.overlaps(self, False)

        return overlaps

    def in_range(self, other, reflexive=True):
        range = self.radius + hypot(self._width + self.buffer*2, self._height + self.buffer*2)
        distance = hypot(self.position.x - other.position.x, self.position.y - other.position.y)
        in_range = distance <= range
        
        if reflexive:
            in_range |= other.in_range(self, False)

        return in_range
Example #26
    def process_one_tile(title_index, algorithm, title_array,
                         algorithm_folder_in, algorithm_folder_out):
        is_intersects = False
        is_within = False
        json_filename = title_array[0]
        csv_filename = json_filename.replace('algmeta.json', 'features.csv')
        tmp_polygon = title_array[1]
        print('--- current title_index is %d' % title_index)
        annotation_title_intersect_list = []
        for index2 in range(0, final_total_annotation_count):
            algorithm2 = polygon_algorithm_final[index2][0]
            if (algorithm == algorithm2):
                tmp_poly = [tuple(i) for i in tmp_polygon]
                title_polygon = Polygon(tmp_poly)
                title_polygon = title_polygon.buffer(0)
                annotation = polygon_algorithm_final[index2][1]
                tmp_poly2 = [tuple(j) for j in annotation]
                annotation_polygon = Polygon(tmp_poly2)
                annotation_polygon = annotation_polygon.buffer(0)
                if (title_polygon.within(annotation_polygon)):
                    is_within = True
                    break
                elif (title_polygon.intersects(annotation_polygon)):
                    is_intersects = True
                    annotation_title_intersect_list.append(annotation)

        if (is_within or is_intersects):
            if is_intersects:
                print('       title %d intersects with human markup %d' % (
                    title_index, index2))
            if is_within:
                print('       title %d is within human markup %d' % (
                    title_index, index2))
            print('       json_filename is %s' % json_filename)
            print('       csv_filename is %s' % csv_filename)
            json_source_file = os.path.join(algorithm_folder_in, json_filename)
            csv_source_file = os.path.join(algorithm_folder_in, csv_filename)
            json_dest_file = os.path.join(algorithm_folder_out, json_filename)
            csv_dest_file = os.path.join(algorithm_folder_out, csv_filename)
            if not os.path.isfile(json_dest_file):
                shutil.copy2(json_source_file, json_dest_file)
            if not os.path.isfile(csv_dest_file):
                shutil.copy2(csv_source_file, csv_dest_file)
            #update analysis_id info in json file
            with open(json_dest_file, 'r') as f:
                json_data = json.load(f)
                analysis_id = json_data['analysis_id']
                image_width = json_data["image_width"]
                image_height = json_data["image_height"]
                json_data['analysis_id'] = new_execution_id
                json_data['analysis_desc'] = analysis_id
            with open(json_dest_file, 'w') as f2:
                f2.write(json.dumps(json_data))

        if is_intersects:
            #print 'intersect annotation number is %d.' % len(annotation_title_intersect_list);
            json_dest_file = os.path.join(algorithm_folder_out, json_filename)
            csv_dest_file = os.path.join(algorithm_folder_out, csv_filename)
            my_tem_file = 'tmp_file_' + str(title_index) + '.csv'
            tmp_file = os.path.join(algorithm_folder_out, my_tem_file)
            with open(csv_dest_file, 'r', newline='') as csv_read, \
                    open(tmp_file, 'w', newline='') as csv_write:
                reader = csv.reader(csv_read)
                headers = next(reader)
                #write header to tmp file
                csv_writer = csv.writer(csv_write)
                csv_writer.writerow(headers)
                polygon_index = headers.index('Polygon')
                for row in reader:
                    current_polygon = row[polygon_index]
                    new_polygon = []
                    tmp_str = str(current_polygon)
                    tmp_str = tmp_str.replace('[', '')
                    tmp_str = tmp_str.replace(']', '')
                    split_str = tmp_str.split(':')
                    for i in range(0, len(split_str) - 1, 2):
                        point = [
                            float(split_str[i]) / float(image_width),
                            float(split_str[i + 1]) / float(image_height)
                        ]
                        new_polygon.append(point)
                    tmp_poly = [tuple(i) for i in new_polygon]
                    computer_polygon = Polygon(tmp_poly)
                    computer_polygon = computer_polygon.buffer(0)
                    has_intersects = False
                    for annotation in annotation_title_intersect_list:
                        tmp_poly2 = [tuple(j) for j in annotation]
                        annotation_polygon = Polygon(tmp_poly2)
                        annotation_polygon = annotation_polygon.buffer(0)
                        if (computer_polygon.intersects(annotation_polygon)):
                            has_intersects = True
                            break
                    #write each row to the tmp csv file
                    if has_intersects:
                        csv_writer.writerow(row)
            shutil.move(tmp_file, csv_dest_file)
            print('moved temp file %s to %s' % (tmp_file, csv_dest_file))
Example #27
def str2multipolygon(s):
    """ Convert string PTS data into a polygon
    """
    segments = get_segments_from_text(s)

    # Simple case whereby the segment is its own circle, thank goodness
    if (len(segments) == 1 and segments[0][0][0] == segments[0][-1][0]
            and segments[0][0][1] == segments[0][-1][1]):
        print('Single closed polygon found, done and done')
        return MultiPolygon([Polygon(segments[0])])

    # Slightly bad line-work, whereby the start and end points are very close
    # to each other
    if (len(segments) == 1
            and ((segments[0][0][0] - segments[0][-1][0])**2 +
                 (segments[0][0][1] - segments[0][-1][1])**2)**0.5 < 0.05):
        msg = ("assuming linework error, begin: (%.2f %.2f) end: (%.2f %.2f)"
               ) % (segments[0][0][0], segments[0][0][1], segments[0][-1][0],
                    segments[0][-1][1])
        print(msg)
        segments[0][-1] = segments[0][0]
        return MultiPolygon([Polygon(segments[0])])

    # We have some work to do
    load_conus_data()

    # We start with just a conus polygon and we go from here, down the rabbit
    # hole
    polys = [
        copy.deepcopy(CONUS['poly']),
    ]

    for i, segment in enumerate(segments):
        print(('  Iterate: %s/%s, len(segment): %s (%.2f %.2f) (%.2f %.2f)') %
              (i + 1, len(segments), len(segment), segment[0][0],
               segment[0][1], segment[-1][0], segment[-1][1]))
        if segment[0] == segment[-1] and len(segment) > 2:
            print('     segment %s is closed polygon!' % (i, ))
            lr = LinearRing(LineString(segment))
            if not lr.is_ccw:
                print('     polygon is counter-clockwise (exterior)')
                polys.insert(0, Polygon(segment))
                continue
            print("     polygon is clockwise (interior), computing which")
            found = False
            for j, poly in enumerate(polys):
                if poly.intersection(lr):
                    interiors = [l for l in polys[j]._interiors]
                    interiors.append(lr)
                    newp = Polygon(polys[j].exterior, interiors)
                    if newp.is_valid:
                        polys[j] = newp
                        print(("     polygon is interior to polys #%s, "
                               "area now %.2f") % (j, polys[j].area))
                    else:
                        raise Exception(('Adding interior polygon resulted '
                                         'in an invalid geometry, aborting'))
                    found = True
                    break
            if not found:
                print('      ERROR: did not find intersection!')
            continue

        # Attempt to 'clean' this string against the CONUS Polygon
        ls = LineString(segment)
        if ls.is_valid:
            print('     linestring is valid!')
            newls = ls.intersection(CONUS['poly'])
            if newls.is_valid:
                print(("     newls is valid and has geom_type: %s") %
                      (newls.geom_type))
                if newls.geom_type in [
                        'MultiLineString', 'GeometryCollection'
                ]:
                    print(
                        ('     intersection with conuspoly found %s segments')
                        % (len(newls.geoms), ))
                    maxlength = 0
                    for geom in newls.geoms:
                        if geom.length > maxlength:
                            newls2 = geom
                            maxlength = geom.length
                    newls = newls2
                segment = list(zip(*newls.xy))
            else:
                print('     Intersection landed here? %s' % (newls.is_valid, ))
        else:
            print('---------> INVALID LINESTRING? |%s|' % (str(segments), ))

        line = np.array(segment)

        # Figure out which polygon this line intersects
        found = False
        for j in range(-1, -1 - len(polys), -1):
            if found:
                break
            poly = polys[j]
            print("     polys iter j=%s len(polys) = %s" % (j, len(polys)))
            if not poly.intersection(ls):
                print("    - linestring does not intersect poly, continue")
                continue
            found = True
            for q in list(range(5)):
                # Compute the intersection points of this segment and what
                # is left of the pie
                (x, y) = poly.exterior.xy
                pie = np.array(list(zip(x, y)))
                distance = ((pie[:, 0] - line[q, 0])**2 +
                            (pie[:, 1] - line[q, 1])**2)**.5
                idx1 = np.argmin(distance) - 1
                idx1 = idx1 if idx1 > -1 else 0
                distance = ((pie[:, 0] - line[0 - (q + 1), 0])**2 +
                            (pie[:, 1] - line[0 - (q + 1), 1])**2)**.5
                idx2 = np.argmin(distance) + 1
                idx2 = idx2 if idx2 > -1 else 0

                sz = np.shape(pie)[0]
                print(('     Q:%s computed intersections '
                       'idx1: %s/%s idx2: %s/%s') % (q, idx1, sz, idx2, sz))
                if idx1 < idx2:
                    print(
                        ('     CASE 1: idx1:%s idx2:%s Crosses start finish') %
                        (idx1, idx2))
                    # Now we piece the puzzle together!
                    tmpline = np.concatenate([line, pie[idx2:]])
                    tmpline = np.concatenate([tmpline, pie[:idx1]])
                    if Polygon(tmpline, polys[j].interiors).is_valid:
                        pie = tmpline
                        polys[j] = Polygon(pie, polys[j].interiors)
                        print(('     replacing polygon index: %s area: %.2f') %
                              (j, polys[j].area))
                    else:
                        continue
                elif idx1 > idx2:
                    print('     CASE 2 idx1:%s idx2:%s' % (idx1, idx2))
                    tmpline = np.concatenate([line, pie[idx2:idx1]])
                    newpoly = Polygon(tmpline)
                    if not newpoly.is_valid:
                        print("    newpolygon is not valid, buffer(0) called ")
                        newpoly = newpoly.buffer(0)
                    polys.append(newpoly)
                    print(("     + adding polygon index: %s area: %.2f "
                           "isvalid: %s") %
                          (len(polys) - 1, polys[-1].area, polys[-1].is_valid))
                else:
                    raise Exception('this should not happen, idx1 == idx2!')
                print("     breaking out of q loop")
                break

        if not found:
            print('     segment did not intersect')

    res = []
    print(('  Resulted in len(polys): %s, now quality controlling') %
          (len(polys), ))
    for i, poly in enumerate(polys):
        if not poly.is_valid:
            print('     ERROR: polygon %s is invalid!' % (i, ))
            continue
        if poly.area == CONUS['poly'].area:
            print('     polygon %s is just CONUS, skipping' % (i, ))
            continue
        print('     polygon: %s has area: %s' % (i, poly.area))
        res.append(poly)
    if len(res) == 0:
        raise Exception(("Processed no geometries, this is a bug!\n"
                         "  s is %s\n"
                         "  segments is %s" % (repr(s), repr(segments))))
    return MultiPolygon(res)
Example #28
def compute_2d_visibility(nodes_gdf,
                          buildings_gdf,
                          max_distance_node_to_building=300):
    """
    For each node in a nodes GeoDataFrame, the function determines which buildings in a buildings GeoDataFrame are visible from it,
    on the basis of 2d lines of visibility. It returns a dictionary whose keys are nodes' IDs and whose values are lists of visible buildings' IDs.
     
    Parameters
    ----------
    nodes_gdf: Point GeoDataFrame
        The GeoDataFrame of the nodes of a street network
    buildings_gdf: Polygon GeoDataFrame
        The GeoDataFrame of the buildings of a city
    max_distance_node_to_building: float
        It regulates the search space from the node
        
    Returns
    -------
    dictionary
    """

    nodes_gdf = nodes_gdf.copy()
    buildings_gdf = buildings_gdf.copy()

    nodes_gdf.set_index("nodeID", drop=False, inplace=True, append=False)
    del nodes_gdf.index.name
    buildings_gdf.set_index("buildingID",
                            drop=False,
                            inplace=True,
                            append=False)
    del buildings_gdf.index.name

    sindex_n = nodes_gdf.sindex
    sindex_b = buildings_gdf.sindex
    ix_geo_b = buildings_gdf.columns.get_loc("geometry") + 1
    ix_geo_n = nodes_gdf.columns.get_loc("geometry") + 1
    d = {el: [] for el in nodes_gdf.nodeID}
    interval = max_distance_node_to_building

    for row_b in buildings_gdf.itertuples():
        exteriors = row_b[ix_geo_b].exterior
        coords = list(exteriors.coords)
        no_holes = Polygon(coords)

        possible_obstacles_index = list(
            sindex_b.intersection(no_holes.buffer(interval).bounds))
        possible_obstacles = buildings_gdf.iloc[possible_obstacles_index]
        obstacles = possible_obstacles[possible_obstacles.intersects(
            no_holes.buffer(interval))]
        obstacles.drop(row_b.Index, axis=0, inplace=True, errors="ignore")

        possible_nodes_index = list(
            sindex_n.intersection(no_holes.buffer(interval).bounds))
        possible_nodes = nodes_gdf.iloc[possible_nodes_index]
        nodes_around = possible_nodes[possible_nodes.intersects(
            no_holes.buffer(interval))]
        if len(nodes_around) == 0:
            continue

        new_ring = coords.copy()
        distance_so_far = 0

        for n, i in enumerate(coords):
            if (n == 0) | (n == len(coords) - 1):
                continue
            distance = Point(i).distance(Point(coords[n - 1]))
            if distance < interval:
                distance_so_far = distance_so_far + distance
                continue

            vertexes_to_add = int(distance / interval)
            index = new_ring.index(i)

            for v in range(0, vertexes_to_add):
                distance_along = distance_so_far + interval + (interval * v)
                next_vertex = exteriors.interpolate(distance_along)
                new_index = index + v
                new_ring.insert(new_index, next_vertex)

            distance_so_far = distance_so_far + distance

        new_ring = new_ring[:-1]
        no_obstacles = False
        if (len(obstacles) > 0):
            union = obstacles.unary_union
        else:
            no_obstacles = True

        for row_n in nodes_around.itertuples():

            for coord in new_ring:
                v = LineString([row_n[ix_geo_n], Point(coord)])
                if not no_obstacles:
                    if v.intersects(union):
                        continue

                self_intersection = v.intersection(exteriors)
                if (self_intersection.geom_type == "Point") | (
                        self_intersection.geom_type == "GeometryCollection"):
                    d[row_n.Index] = d[row_n.Index] + [row_b.Index]
                    break
                else:
                    continue
    return (d)
Example #29
def create_mask(poly, netcdf_base, variable, poly_type="polygon"):
    """
    Takes a Well Known Text polygon or line
    and produces a masking array for use with numpy.
    @param poly - WKT polygon or line
    @param variable - WCS variable to mask off
    @param poly_type - one of [polygon, line]
    """

    loaded_poly = wkt.loads(poly)
    # wcs_envelope = loaded_poly.envelope
    # bounds =  wcs_envelope.bounds
    # bb = ','.join(map(str,bounds))

    # params['bbox']._value = bb
    # params['url'] = createURL(params)
    # variable = params['coverage'].value
    # #wcs_url = wcs_base_url % (bounds[0],bounds[1],bounds[2],bounds[3])
    # wcs_url = params['url'].value
    # #testfile=urllib.URLopener()
    # #testfile.retrieve(wcs_url,"%s.nc" % variable)
    # try:
    #    resp = contactWCSServer(wcs_url)
    # except urllib2.HTTPError:
    #    params["vertical"]._value = params["vertical"].value[1:]
    #    params['url'] = createURL(params)
    #    wcs_url = params['url'].value
    #    resp = contactWCSServer(wcs_url)
    # tfile = saveOutTempFile(resp)
    to_be_masked = netCDF.Dataset(netcdf_base, "a")

    chl = to_be_masked.variables[variable][:]
    fillValue = getFillValue(to_be_masked.variables[variable])

    latvals = to_be_masked.variables[str(getCoordinateVariable(to_be_masked, "Lat").dimensions[0])][:]
    lonvals = to_be_masked.variables[str(getCoordinateVariable(to_be_masked, "Lon").dimensions[0])][:]

    from shapely.geometry import Polygon

    minlat = min(latvals)
    maxlat = max(latvals)
    minlon = min(lonvals)
    maxlon = max(lonvals)

    lonlat_poly = Polygon([[minlon, maxlat], [maxlon, maxlat], [maxlon, minlat], [minlon, minlat], [minlon, maxlat]])
    # print '#'*50
    # print lonlat_poly
    lonlat_poly = lonlat_poly.buffer(0)
    overlap_poly = loaded_poly.intersection(lonlat_poly)
    poly = poly[trim_sizes[poly_type]]

    poly = poly.split(",")
    poly = [x.split() for x in poly]

    # found_lats = [find_closest(latvals, float(x[1])) for x in poly]
    # found_lons = [find_closest(lonvals, float(x[0])) for x in poly]
    if overlap_poly.type == "MultiPolygon":
        found = []
        for poly in overlap_poly:
            found_lats = [find_closest(latvals, float(x)) for x in poly.exterior.xy[1]]
            found_lons = [find_closest(lonvals, float(x)) for x in poly.exterior.xy[0]]
            found.append(list(zip(found_lons, found_lats)))

    elif overlap_poly.type == "MultiLineString":
        found = []
        for poly in overlap_poly:
            found_lats = [find_closest(latvals, float(x)) for x in poly.xy[1]]
            found_lons = [find_closest(lonvals, float(x)) for x in poly.xy[0]]
            found.append(list(zip(found_lons, found_lats)))

    else:
        if poly_type is "line":
            found_lats = [find_closest(latvals, float(x)) for x in overlap_poly.xy[1]]
            found_lons = [find_closest(lonvals, float(x)) for x in overlap_poly.xy[0]]
        else:
            found_lats = [find_closest(latvals, float(x)) for x in overlap_poly.exterior.xy[1]]
            found_lons = [find_closest(lonvals, float(x)) for x in overlap_poly.exterior.xy[0]]

        # found = zip(overlap_poly.exterior.xy[0],overlap_poly.exterior.xy[1])
        found = list(zip(found_lons, found_lats))

    # img = Image.new('L', (chl.shape[2],chl.shape[1]), 0)
    img = Image.new(
        "L",
        (
            chl.shape[
                to_be_masked.variables[variable].dimensions.index(
                    str(getCoordinateVariable(to_be_masked, "Lon").dimensions[0])
                )
            ],
            chl.shape[
                to_be_masked.variables[variable].dimensions.index(
                    str(getCoordinateVariable(to_be_masked, "Lat").dimensions[0])
                )
            ],
        ),
        0,
    )

    if overlap_poly.type == "MultiPolygon":
        for f in found:
            ImageDraw.Draw(img).polygon(f, outline=2, fill=2)
    elif overlap_poly.type == "MultiLineString":
        for f in found:
            ImageDraw.Draw(img).polygon(f, outline=2, fill=2)
    else:
        if poly_type == "polygon":
            ImageDraw.Draw(img).polygon(found, outline=2, fill=2)
        if poly_type == "line":
            ImageDraw.Draw(img).line(found, fill=2)

    masker = np.array(img)
    # fig = plt.figure()
    masked_variable = []
    # print chl.shape
    # print fillValue
    for i in range(chl.shape[0]):
        # print i
        masked_variable.append(np.ma.masked_array(chl[i, :], mask=[x != 2 for x in masker]))
        # print "adding null values"
        masked_variable[i].filled(fill_value=fillValue)
        where_is_nan = np.isnan(masked_variable[i])
        masked_variable[i][masked_variable[i] == fillValue] = np.nan
        # print masked_variable[i]
        # a = fig.add_subplot(1,5,i+1)
        # imgplot = plt.imshow(masked_variable)

    # plt.show()
    # print np.array(masked_variable).shape
    # where_is_nan = np.isnan(masked_variable)
    # masked_variable[where_is_nan] = 9.96921e+36
    to_be_masked.variables[variable][:] = np.ma.array(masked_variable)[:]
    # print  to_be_masked.variables[variable][:]
    # print np.min(to_be_masked.variables[variable][:])
    # print np.max(to_be_masked.variables[variable][:])
    to_be_masked.close()

    to_be_masked = netCDF.Dataset(netcdf_base, "r+")
    # print to_be_masked.variables[variable][:]
    # print to_be_masked.variables[variable][:]
    # to_be_masked.close()
    return masked_variable, to_be_masked, masker, variable
Example #30
def load_glacier_inventory(RGI_DIRECTORY, RGI_REGION):
    #-- list of Randolph Glacier Inventory files
    RGI_files = []
    RGI_files.append('01_rgi60_Alaska')
    RGI_files.append('02_rgi60_WesternCanadaUS')
    RGI_files.append('03_rgi60_ArcticCanadaNorth')
    RGI_files.append('04_rgi60_ArcticCanadaSouth')
    RGI_files.append('05_rgi60_GreenlandPeriphery')
    RGI_files.append('06_rgi60_Iceland')
    RGI_files.append('07_rgi60_Svalbard')
    RGI_files.append('08_rgi60_Scandinavia')
    RGI_files.append('09_rgi60_RussianArctic')
    RGI_files.append('10_rgi60_NorthAsia')
    RGI_files.append('11_rgi60_CentralEurope')
    RGI_files.append('12_rgi60_CaucasusMiddleEast')
    RGI_files.append('13_rgi60_CentralAsia')
    RGI_files.append('14_rgi60_SouthAsiaWest')
    RGI_files.append('15_rgi60_SouthAsiaEast')
    RGI_files.append('16_rgi60_LowLatitudes')
    RGI_files.append('17_rgi60_SouthernAndes')
    RGI_files.append('18_rgi60_NewZealand')
    RGI_files.append('19_rgi60_AntarcticSubantarctic')
    #-- read input zipfile containing RGI shapefiles
    zs = zipfile.ZipFile(
        os.path.join(RGI_DIRECTORY,
                     '{0}.zip'.format(RGI_files[RGI_REGION - 1])))
    dbf, prj, shp, shx = [
        io.BytesIO(zs.read(s)) for s in sorted(zs.namelist())
        if re.match(r'(.*?)\.(dbf|prj|shp|shx)$', s)
    ]
    #-- read the shapefile and extract entities
    shape_input = shapefile.Reader(dbf=dbf,
                                   prj=prj,
                                   shp=shp,
                                   shx=shx,
                                   encodingErrors='ignore')
    shape_entities = shape_input.shapes()
    shape_attributes = shape_input.records()
    #-- extract the RGI entities
    poly_dict = {}
    for i, att in enumerate(shape_attributes):
        #-- extract latitude and longitude coordinates for entity
        points = np.array(shape_entities[i].points)
        #-- entities can have multiple parts
        parts = shape_entities[i].parts
        parts.append(len(points))
        #-- list object for coordinates (exterior and holes)
        poly_list = []
        #-- add each part to list
        for p1, p2 in zip(parts[:-1], parts[1:]):
            poly_list.append(list(zip(points[p1:p2, 0], points[p1:p2, 1])))
        #-- convert poly_list into Polygon object with holes
        poly_obj = Polygon(poly_list[0], poly_list[1:])
        #-- Valid Polygon may not possess overlapping exterior or interior rings
        if (not poly_obj.is_valid):
            poly_obj = poly_obj.buffer(0)
        #-- add to dictionary based on RGI identifier
        poly_dict[att[0]] = poly_obj
    #-- close the zipfile
    zs.close()
    #-- return the dictionary of polygon objects and the input file
    return (poly_dict, RGI_files[RGI_REGION - 1])
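# Hypothetical usage of load_glacier_inventory above; the directory path is a
# placeholder, not a value from the original source (region 7 -> Svalbard):
#   poly_dict, rgi_name = load_glacier_inventory('/path/to/RGI', 7)
#   print(rgi_name, len(poly_dict))
#   for rgi_id, poly in poly_dict.items():
#       print(rgi_id, poly.bounds)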
def isTileRelated2TumorRegion(json_file_path,humanMarkupList_tumor,humanMarkupList_non_tumor):
  with open(json_file_path) as f:
    datajson = json.load(f);
    img_width = datajson['image_width'];
    img_height = datajson['image_height'];
    tile_x = datajson['tile_minx'];
    tile_y = datajson['tile_miny'];
    tile_width = datajson['tile_width'];
    tile_height = datajson['tile_height'];
    
    x1=float(tile_x)/float(img_width);
    y1=float(tile_y)/float(img_height);
    x2=float(tile_x+tile_width)/float(img_width);
    y2=float(tile_y+tile_height)/float(img_height);
    if x1>1.0:
      x1=1.0;
    if x1<0.0:
      x1=0.0; 
    if x2>1.0:
      x2=1.0;            
    if x2<0.0:
      x2=0.0;            
    if y1>1.0:
      y1=1.0;
    if y1<0.0:
      y1=0.0; 
    if y2>1.0:
      y2=1.0;
    if y2<0.0:
      y2=0.0;  
    tile_polygon_0=[[x1,y1],[x2,y1],[x2,y2],[x1,y2],[x1,y1]];  
    tmp_poly=[tuple(i) for i in tile_polygon_0];
    tmp_polygon = Polygon(tmp_poly);
    tile_polygon = tmp_polygon.buffer(0);
    
    tileHumanMarkupRelation_tumor="disjoin";
    tileHumanMarkupRelation_nontumor="disjoin";
    
    for humanMarkup in humanMarkupList_tumor:                         
      if (tile_polygon.within(humanMarkup)):              
        tileHumanMarkupRelation_tumor="within";        
        break;
      elif (tile_polygon.intersects(humanMarkup)):                
        tileHumanMarkupRelation_tumor="intersect";          
        break;
      else:               
        tileHumanMarkupRelation_tumor="disjoin";           
            
    for humanMarkup2 in humanMarkupList_non_tumor:                        
      if (tile_polygon.within(humanMarkup2)):              
        tileHumanMarkupRelation_nontumor="within";        
        break;
      elif (tile_polygon.intersects(humanMarkup2)):                
        tileHumanMarkupRelation_nontumor="intersect";        
        break;
      else:               
        tileHumanMarkupRelation_nontumor="disjoin";          
                      
    #only calculate features within/intersect tumor/non tumor region           
    if(tileHumanMarkupRelation_tumor=="disjoin" and tileHumanMarkupRelation_nontumor=="disjoin"):                     
      return False;            
    else: 
      return True;      
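# A minimal sketch of the tile/markup relation test above, with made-up
# normalised coordinates: classify a tile rectangle as "within", "intersect"
# or "disjoin" relative to a human-markup polygon.
from shapely.geometry import Polygon

tile = Polygon([(0.2, 0.2), (0.3, 0.2), (0.3, 0.3), (0.2, 0.3)])
markup = Polygon([(0.1, 0.1), (0.6, 0.1), (0.6, 0.6), (0.1, 0.6)])
if tile.within(markup):
    relation = "within"
elif tile.intersects(markup):
    relation = "intersect"
else:
    relation = "disjoin"
print(relation)  # "within"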
def findTumor_NonTumorRegions(case_id, user):
    execution_id = user + "_Tumor_Region"
    execution_id2 = user + "_Non_Tumor_Region"

    #handle only tumor region overlap
    humanMarkupList_tumor = []
    tmp_tumor_markup_list0 = []

    for humarkup in objects.find(
        {
            "provenance.image.case_id": case_id,
            "provenance.analysis.execution_id": execution_id
        }, {
            "geometry": 1,
            "_id": 1
        }):
        humarkup_polygon_tmp = humarkup["geometry"]["coordinates"][0]
        tmp_polygon = [tuple(i2) for i2 in humarkup_polygon_tmp]
        tmp_polygon2 = Polygon(tmp_polygon)
        tmp_polygon2 = tmp_polygon2.convex_hull
        tmp_polygon2 = tmp_polygon2.buffer(0)
        tmp_tumor_markup_list0.append(tmp_polygon2)

    #handle MultiPolygon
    tmp_tumor_markup_list = handMultiPolygon(tmp_tumor_markup_list0)

    index_intersected = []
    for index1 in range(0, len(tmp_tumor_markup_list)):
        if index1 in index_intersected:  #skip polygon which has been merged into another one
            continue
        humarkup_polygon1 = tmp_tumor_markup_list[index1]
        is_within = False
        is_intersect = False
        for index2 in range(0, len(tmp_tumor_markup_list)):
            humarkup_polygon2 = tmp_tumor_markup_list[index2]
            if (index1 != index2):
                if (humarkup_polygon1.within(humarkup_polygon2)):
                    is_within = True
                    break
                if (humarkup_polygon1.intersects(humarkup_polygon2)):
                    humarkup_polygon1 = humarkup_polygon1.union(
                        humarkup_polygon2)
                    is_intersect = True
                    index_intersected.append(index2)
        if (not is_within and not is_intersect):
            humanMarkupList_tumor.append(humarkup_polygon1)
        if (is_within):
            continue
        if (is_intersect):
            humanMarkupList_tumor.append(humarkup_polygon1)

    #handle only non tumor region overlap
    humanMarkupList_non_tumor = []
    tmp_non_tumor_markup_list0 = []
    for humarkup in objects.find(
        {
            "provenance.image.case_id": case_id,
            "provenance.analysis.execution_id": execution_id2
        }, {
            "geometry": 1,
            "_id": 0
        }):
        humarkup_polygon_tmp = humarkup["geometry"]["coordinates"][0]
        tmp_polygon = [tuple(i2) for i2 in humarkup_polygon_tmp]
        tmp_polygon2 = Polygon(tmp_polygon)
        tmp_polygon2 = tmp_polygon2.convex_hull
        tmp_polygon2 = tmp_polygon2.buffer(0)
        tmp_non_tumor_markup_list0.append(tmp_polygon2)

    #handle MultiPolygon
    tmp_non_tumor_markup_list = handMultiPolygon(tmp_non_tumor_markup_list0)
    index_intersected = []
    for index1 in range(0, len(tmp_non_tumor_markup_list)):
        if index1 in index_intersected:  #skip polygon which has been merged into another one
            continue
        humarkup_polygon1 = tmp_non_tumor_markup_list[index1]
        is_within = False
        is_intersect = False
        for index2 in range(0, len(tmp_non_tumor_markup_list)):
            humarkup_polygon2 = tmp_non_tumor_markup_list[index2]
            if (index1 != index2):
                if (humarkup_polygon1.within(humarkup_polygon2)):
                    is_within = True
                    break
                if (humarkup_polygon1.intersects(humarkup_polygon2)):
                    humarkup_polygon1 = humarkup_polygon1.union(
                        humarkup_polygon2)
                    is_intersect = True
                    index_intersected.append(index2)
        if (not is_within and not is_intersect):
            humanMarkupList_non_tumor.append(humarkup_polygon1)
        if (is_within):
            continue
        if (is_intersect):
            humanMarkupList_non_tumor.append(humarkup_polygon1)

    #handle tumor and non tumor region cross overlap
    for index1, tumor_region in enumerate(humanMarkupList_tumor):
        for index2, non_tumor_region in enumerate(humanMarkupList_non_tumor):
            if (tumor_region.within(non_tumor_region)):
                ext_polygon_intersect_points = list(
                    zip(*non_tumor_region.exterior.coords.xy))
                int_polygon_intersect_points = list(
                    zip(*tumor_region.exterior.coords.xy))
                newPoly = Polygon(ext_polygon_intersect_points,
                                  [int_polygon_intersect_points])
                humanMarkupList_non_tumor[index2] = newPoly
                #add a hole to this polygon
            elif (non_tumor_region.within(tumor_region)):
                ext_polygon_intersect_points = list(
                    zip(*tumor_region.exterior.coords.xy))
                int_polygon_intersect_points = list(
                    zip(*non_tumor_region.exterior.coords.xy))
                newPoly = Polygon(ext_polygon_intersect_points,
                                  [int_polygon_intersect_points])
                humanMarkupList_tumor[index1] = newPoly
                #add a hole to this polygon

    return humanMarkupList_tumor, humanMarkupList_non_tumor
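
# The two merge loops above union intersecting markup polygons and drop those
# contained in others. A minimal sketch of the same effect with
# shapely.ops.unary_union (illustrative geometries, not the original markups):
from shapely.geometry import Polygon
from shapely.ops import unary_union

markups = [
    Polygon([(0, 0), (4, 0), (4, 4), (0, 4)]),
    Polygon([(3, 3), (7, 3), (7, 7), (3, 7)]),          # intersects the first
    Polygon([(1, 1), (2, 1), (2, 2), (1, 2)]),          # contained in the first
    Polygon([(10, 10), (12, 10), (12, 12), (10, 12)]),  # disjoint from the rest
]
merged = unary_union(markups)
regions = list(merged.geoms) if merged.geom_type == 'MultiPolygon' else [merged]
print(len(regions), "merged regions")  # expected: 2
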
pts = [Point(p) for p in points]

# In[16]:

pts = gpd.GeoDataFrame(geometry=pts)

# In[17]:

f, ax = plt.subplots(figsize=(10, 10))
pts.plot(ax=ax)
sns.despine(left=True, bottom=True)
#plt.savefig('tesdiag_3.svg')

# In[18]:

hull = limit.buffer(100)
hull = _densify(hull, 10)
hull_array = np.array(hull.boundary.coords).tolist()
for i, a in enumerate(hull_array):
    points.append(hull_array[i])
    ids.append(-1)

# In[19]:

voronoi_diagram = Voronoi(np.array(points))

# In[20]:


def _regions(voronoi_diagram, unique_id, ids, crs):
    """
Пример #34
0
def project_extents(extents, src_proj, dest_proj, tol=1e-6):
    x1, y1, x2, y2 = extents

    if (isinstance(src_proj, ccrs.PlateCarree) and
        not isinstance(dest_proj, ccrs.PlateCarree) and
        src_proj.proj4_params['lon_0'] != 0):
        xoffset = src_proj.proj4_params['lon_0']
        x1 = x1 - xoffset
        x2 = x2 - xoffset
        src_proj = ccrs.PlateCarree()

    # Limit latitudes
    cy1, cy2 = src_proj.y_limits
    if y1 < cy1: y1 = cy1
    if y2 > cy2:  y2 = cy2

    # Offset with tolerances
    x1 += tol
    x2 -= tol
    y1 += tol
    y2 -= tol

    # Wrap longitudes
    cx1, cx2 = src_proj.x_limits
    if isinstance(src_proj, ccrs._CylindricalProjection):
        lons = wrap_lons(np.linspace(x1, x2, 10000), -180., 360.)
        x1, x2 = lons.min(), lons.max()
    else:
        if x1 < cx1: x1 = cx1
        if x2 > cx2: x2 = cx2

    domain_in_src_proj = Polygon([[x1, y1], [x2, y1],
                                  [x2, y2], [x1, y2],
                                  [x1, y1]])
    boundary_poly = Polygon(src_proj.boundary)
    dest_poly = src_proj.project_geometry(Polygon(dest_proj.boundary), dest_proj).buffer(0)
    if src_proj != dest_proj:
        # Erode boundary by threshold to avoid transform issues.
        # This is a workaround for numerical issues at the boundary.
        eroded_boundary = boundary_poly.buffer(-src_proj.threshold)
        geom_in_src_proj = eroded_boundary.intersection(
            domain_in_src_proj)
        try:
            geom_clipped_to_dest_proj = dest_poly.intersection(
                geom_in_src_proj)
        except Exception:
            geom_clipped_to_dest_proj = None
        if geom_clipped_to_dest_proj:
            geom_in_src_proj = geom_clipped_to_dest_proj
        try:
            geom_in_crs = dest_proj.project_geometry(geom_in_src_proj, src_proj)
        except ValueError:
            src_name = type(src_proj).__name__
            dest_name = type(dest_proj).__name__
            raise ValueError('Could not project data from %s projection '
                             'to %s projection. Ensure the coordinate '
                             'reference system (crs) matches your data '
                             'and the kdims.' %
                             (src_name, dest_name))
    else:
        geom_in_crs = boundary_poly.intersection(domain_in_src_proj)
    return geom_in_crs.bounds
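# Hypothetical call of project_extents above: reproject lon/lat extents into a
# Mollweide projection (projections and extents chosen purely for illustration):
#   import cartopy.crs as ccrs
#   extents = (-20.0, 30.0, 40.0, 60.0)  # x1, y1, x2, y2 in PlateCarree
#   print(project_extents(extents, ccrs.PlateCarree(), ccrs.Mollweide()))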
Пример #35
0
def x_section_from_latlon(elevation_file,
                          x_section_lat0,
                          x_section_lon0,
                          x_section_lat1,
                          x_section_lon1,
                          as_polygon=False,
                          auto_clean=False):

    """
    This workflow extracts a cross section from a DEM
    based on the input latitude and longitude point pairs.

    Parameters:
    -----------
    elevation_file: str
        Path to the elevation DEM.
    x_section_lat0: float
        The first coordinate latitude.
    x_section_lon0: float
        The first coordinate longitude.
    x_section_lat1: float
        The second coordinate latitude.
    x_section_lon1: float
        The second coordinate longitude.
    as_polygon: bool, optional
        If True, will return cross section as a
        :obj:`shapely.geometry.Polygon`. Default is False.
    auto_clean: bool, optional
        If True, will attempt to clean any issues from the polygon.
        Default is False.

    Returns:
    --------
    list or :obj:`shapely.geometry.Polygon`
        Cross section information.
        The list will be xy coordinate pairs.


    Example::

        from shapely.geometry import Polygon
        from xman.xsect import x_section_from_latlon


        elevation_file = '/path/to/elevation.tif'
        lat1 = 34.105265417341442
        lon1 = 38.993958690587505
        lat2 = 34.107264451129197
        lon2 = 38.99355588515526
        x_sect_list = x_section_from_latlon(elevation_file,
                                            lat1,
                                            lon1,
                                            lat2,
                                            lon2)

    """
    utm_proj = utm_proj_from_latlon(x_section_lat0, x_section_lon0,
                                    as_osr=True)
    sp_ref = osr.SpatialReference()
    sp_ref.ImportFromEPSG(4326)
    geo_to_utm_trans = osr.CoordinateTransformation(sp_ref, utm_proj)

    x_line_m = LineString((
        geo_to_utm_trans.TransformPoint(x_section_lon0, x_section_lat0)[:2],
        geo_to_utm_trans.TransformPoint(x_section_lon1, x_section_lat1)[:2]
    ))

    elevation_utm_ggrid = GDALGrid(elevation_file).to_projection(utm_proj)

    x_sect_list = []

    for x_step in np.linspace(0, x_line_m.length, num=20):
        x_point = x_line_m.interpolate(x_step)
        x_sect_list.append((
            x_step, elevation_utm_ggrid.get_val_coord(x_point.x, x_point.y)
        ))

    if as_polygon or auto_clean:
        x_sect_poly = Polygon(x_sect_list)
        if not x_sect_poly.is_valid and auto_clean:
            x_sect_poly = x_sect_poly.buffer(0)
            print("WARNING: Cross section cleaned up.")
            if hasattr(x_sect_poly, 'geoms'):
                if len(x_sect_poly.geoms) > 1:
                    largest_poly = x_sect_poly.geoms[0]
                    for geom_poly in x_sect_poly.geoms[1:]:
                        if geom_poly.area > largest_poly.area:
                            largest_poly = geom_poly
                    x_sect_poly = largest_poly

        if as_polygon:
            return x_sect_poly

        x_coords, y_coords = x_sect_poly.exterior.coords.xy
        return list(zip(x_coords, y_coords))

    return x_sect_list
Пример #36
0
class StationKeeping(TaskBase):
    def __init__(self, nav, markers, buffer_width=10):
        """Machinery to stay within a marked area.

        nav is a Navigation object for common machinery.

        markers is a list of (lat, lon) points marking the area we need to stay in.
        
        buffer_width is a distance in metres. The boat will try to stay this
        far inside the boundaries of the target area. This is the margin for
        turning, errors, wind changes, and so on.
        """
        self.nav = nav
        self.markers = markers or [
            (50.8, 1.01),
            (50.8, 1.03),
            (50.82, 1.01),
            (50.82, 1.03),
        ]
        self.target_zone = Polygon(
            [self.nav.latlon_to_utm(*p) for p in self.markers])
        self.inner_zone = self.target_zone.buffer(-buffer_width)
        self.goal_heading = 0
        self.sailing_state = 'normal'  # sailing state can be 'normal','tack_to_port_tack' or  'tack_to_stbd_tack'

    def start(self):
        pass

    def calculate_state_and_goal(self):
        """Work out what we want the boat to do
        """
        boat_wind_angle = self.nav.angle_to_wind()
        if self.sailing_state != 'normal':
            # A tack is in progress
            if self.sailing_state == 'tack_to_port_tack':
                beating_angle = self.nav.beating_angle
                continue_tack = boat_wind_angle < beating_angle
            else:  # 'tack_to_stbd_tack'
                beating_angle = -self.nav.beating_angle
                continue_tack = boat_wind_angle > beating_angle
            if continue_tack:
                self.goal_heading = self.nav.wind_angle_to_heading(
                    beating_angle)
                return self.sailing_state, self.goal_heading
            else:
                # Tack completed
                self.sailing_state = 'normal'

        if self.nav.position_xy.within(self.inner_zone):
            # We're safe: carry on with our current heading
            return self.sailing_state, self.goal_heading

        centroid = self.target_zone.centroid
        centroid_ll = self.nav.utm_to_latlon(centroid.x, centroid.y)
        heading_to_centroid = self.nav.position_ll.heading_initial(centroid_ll)

        centroid_wind_angle = self.nav.heading_to_wind_angle(
            heading_to_centroid)
        if abs(centroid_wind_angle) > self.nav.beating_angle:
            # We can sail directly towards the centroid
            if (centroid_wind_angle * boat_wind_angle) > 0:
                # These two have the same sign, so we're on the right tack.
                return ('normal', centroid_wind_angle)
            else:
                # We need to tack before going towards the centroid
                if centroid_wind_angle > 0:
                    switch_to = 'tack_to_port_tack'
                    beating_angle = self.nav.beating_angle
                else:
                    switch_to = 'tack_to_stbd_tack'
                    beating_angle = -self.nav.beating_angle
                self.sailing_state = switch_to
                return switch_to, self.nav.wind_angle_to_heading(beating_angle)

        if boat_wind_angle > 0:
            # On the port tack
            beating_angle = self.nav.beating_angle
            other_tack = 'tack_to_stbd_tack'
        else:
            beating_angle = -self.nav.beating_angle
            other_tack = 'tack_to_port_tack'
        if abs(centroid_wind_angle) > 15:
            # Switch to the tack that will take us closest to the centroid
            if (centroid_wind_angle * boat_wind_angle) < 0:
                return other_tack, self.nav.wind_angle_to_heading(
                    -beating_angle)

        # Sail as close to the wind as we can on our current tack
        return 'normal', self.nav.wind_angle_to_heading(beating_angle)
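
# A standalone sketch of the station-keeping zone test used above (marker
# coordinates and buffer are made up): build the target area from corner
# points, shrink it by a safety margin, and check whether a position is
# still comfortably inside.
from shapely.geometry import Point, Polygon

target_zone = Polygon([(0, 0), (100, 0), (100, 100), (0, 100)])  # metres, e.g. UTM
inner_zone = target_zone.buffer(-10)   # stay 10 m inside the boundary
position = Point(95, 50)
print(position.within(inner_zone))     # False: too close to the edge, time to turn back
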
Пример #37
0
def main():

    print("Starting Program \n\n")
    freeze_support()
    #Set Arguments
    args = param_parser()

    if args.command == 'Step_1':

        laspath = args.laspath
        filetype = args.filetype
        workpath = args.workpath.replace('\\', '/')
        deliverypath = args.deliverypath.replace('\\', '/')
        tilelayout = args.geojsonfile.replace('\\', '/')
        step = float(args.step)
        buffer = args.buffer
        aoi = args.aoi
        fill = args.fill
        hydropointsfiles = None
        if args.hydropointsfiles is not None:
            hydropointsfiles = args.hydropointsfiles
            hydropointsfiles = args.hydropointsfiles.replace('\\',
                                                             '/').split(';')

        cores = args.cores
        dt = strftime("%y%m%d_%H%M")

        deliverypath = AtlassGen.makedir(
            os.path.join(deliverypath,
                         '{0}_makeHydro'.format(dt)).replace('\\', '/'))
        workingdir = AtlassGen.makedir(
            os.path.join(workpath, '{0}_makeHydro_Working'.format(dt)).replace(
                '\\', '/'))

        make_Hydro = {}
        make_Hydro_results = []

        tl = AtlassTileLayout()
        tl.fromjson(tilelayout)

        if not args.createmerge:

            for tile in tl:

                tilename = tile.name  #tile,laspath,deliverypath,workingpath,tilelayout,aoi,filetype,step,fill
                make_Hydro[tilename] = AtlassTask(tilename,
                                                  Hydro.newmakeHydroperTile,
                                                  tile, laspath, deliverypath,
                                                  workingdir, tilelayout, aoi,
                                                  filetype, step, fill)

            p = Pool(processes=cores)
            make_Hydro_results = p.map(AtlassTaskRunner.taskmanager,
                                       make_Hydro.values())

            merged_dir = AtlassGen.makedir(
                os.path.join(deliverypath,
                             'output_Step_1_merged_laz').replace('\\', '/'))
            mergedRawHydroFile = '{0}/merged_Hydro_voids_raw.laz'.format(
                merged_dir)
            hydrolazpath = os.path.join(workingdir,
                                        '04_heighted_voids_clipped')
            AtlassGen.mergeFiles(hydrolazpath, mergedRawHydroFile, 'laz')

        else:
            lazpath = args.lazpath
            merged_dir = AtlassGen.makedir(
                os.path.join(lazpath,
                             'output_Step_1_merged_laz').replace('\\', '/'))
            mergedRawHydroFile = '{0}/merged_Hydro_voids_raw.laz'.format(
                merged_dir)
            AtlassGen.mergeFiles(lazpath, mergedRawHydroFile, 'laz')

            mergedHydroShp = mergedRawHydroFile.replace('.laz', '.shp')

    if args.command == 'Step_2':

        lazfile = args.lazfile
        outputfolder = args.outputfolder

        shpfolder = AtlassGen.makedir(os.path.join(outputfolder, 'SHP_Files'))
        step = args.step
        epsg = args.epsg
        area_s = int(args.area_limit)

        cores = args.cores

        print('Making SHP files from the input LAZ files')

        #convert laz TO shp files and index
        path, filename, ext = AtlassGen.FILESPEC(lazfile)
        hydroShpfile = os.path.join(outputfolder,
                                    '{0}.shp'.format(filename)).replace(
                                        '\\', '/')
        prjfile = os.path.join(outputfolder,
                               '{0}.prj'.format(filename)).replace('\\', '/')

        prjfile2 = "\\\\10.10.10.142\\projects\\PythonScripts\\EPSG\\{0}.prj".format(
            epsg)

        if os.path.isfile(prjfile2):
            shutil.copy(prjfile2, prjfile)
        else:
            print("PRJ file for {1} is not available in 10.10.10.142".format(
                epsg))

        createBoundries(lazfile, hydroShpfile, step)

        # Eleanor's code for breaking the shp file up into many shp files goes here.
        #########################################################################################################
        maximum_indices = 9999999

        polygons_folder = AtlassGen.makedir(os.path.join(
            shpfolder, 'polygons')).replace('\\', '/')
        polygon_dic = OrderedDict()

        path, shpfilename, ext = AtlassGen.FILESPEC(hydroShpfile)
        # set up output folders

        # import the polygon. it should be a multipolygon.
        read_shp = shp.Reader(hydroShpfile)
        # import all its shaperecords
        shapes = read_shp.shapes()
        i = 1
        deleted_polygons = []
        for i, record in enumerate(shapes):
            # make filled index number for naming. eg 00000002
            idx = str(i).zfill(len(str(maximum_indices)))
            #print('Running index %s of %s...' % (i, len(shapes) - 1))

            # extract coords
            coordinate_tups = record.points
            r = LinearRing(coordinate_tups)

            # convert that to shapely form
            polygon_in = Polygon(r)

            # get area
            area_m = polygon_in.area
            #print(area_m)
            if area_m < area_s:
                deleted_polygons.append(idx)
                #print('Skipped : Area smaller than 100m2 : {0}'.format(idx))
            else:
                file_name = "{0}_{1}.shp".format(shpfilename, idx)
                polygon_dic[idx] = {
                    'area': area_m,
                    'filename': file_name,
                    'points': coordinate_tups,
                    'status': None,
                    'parent': idx
                }

        print(
            '\n################### Deleted {0} polygons less than {1}m2 ########################\n'
            .format(len(deleted_polygons), area_s))
        print('Total polygons to process : {0}'.format(len(polygon_dic)))
        sorted_list = list(
            OrderedDict(
                sorted(polygon_dic.items(),
                       key=lambda x: x[1]['area'],
                       reverse=True)))

        print(sorted_list)

        largest_poly_id = sorted_list[0]
        #set the largest polygon to be water
        polygon_dic[largest_poly_id]['status'] = 'Water'

        islandLakes = []

        for polyid in sorted_list:

            poly_main = Polygon(polygon_dic[polyid]['points'])

            print('\nWorking on Poly iD = {0}'.format(polyid))
            polys_inside = []

            for id in sorted_list:

                poly_sel = polygon_dic[id]

                if poly_sel['status'] is None or poly_sel['status'] == 'Water':
                    for x in range(len(poly_sel['points'])):

                        pp = poly_sel['points'][x]
                        point_to_check = Point(pp)

                        if point_to_check.within(poly_main):
                            print("    Polygon {0} is inside {1}".format(
                                id, polyid))
                            print("    setting {0} to Island".format(id))
                            poly_sel['status'] = 'Island'
                            poly_sel['parent'] = polyid
                            polys_inside.append(id)
                            break

                        else:
                            poly_sel['status'] = 'Water'

            # if island run the loop to see whether there are any water bodies inside this

            if not len(polys_inside) == 0:
                print('     Islands of polygon {0}   : {1}'.format(
                    polyid, polys_inside))

                for j in polys_inside:
                    island = polygon_dic[j]
                    island_poly = Polygon(island['points'])

                    for k in polys_inside:

                        checkIsland = polygon_dic[k]

                        for x in range(len(checkIsland['points'])):

                            pp = checkIsland['points'][x]
                            point_to_check = Point(pp)

                            if point_to_check.within(island_poly):
                                print(
                                    "    Polygon {0} is a lake Inside {1}".format(
                                        k, j))
                                print("    setting {0} to IslandLake".format(k))
                                polygon_dic[k]['status'] = 'IslandLake'
                                islandLakes.append(k)

                                break

        for key, value in polygon_dic.items():
            print(key, value['status'], value['parent'])
            buffered_coordinates = []
            if value['status'] == 'Water':
                coordinate_list = value['points']
                # Buffer polygons
                polygon_in = Polygon(coordinate_list)
                try:
                    buffered_poly = Polygon(
                        polygon_in.buffer(0.1).exterior, [r])
                    buffered_coordinates = buffered_poly.exterior.coords
                except:
                    print('could not buffer {0}'.format(key))
                    pass
                finally:
                    #print(len(buffered_coordinates))
                    if len(buffered_coordinates) == 0:
                        buffered_coordinates = coordinate_list

                file_path = os.path.join(
                    polygons_folder,
                    'Water_poly_{0}.shp'.format(key)).replace('\\', '/')
                prjfile = os.path.join(
                    polygons_folder,
                    'Water_poly_{0}.prj'.format(key)).replace('\\', '/')

                # write the shp. Type 5 is a single simple polygon without z values. Type 15 has z.
                sh = shp.Writer(file_path, shapeType=15)
                sh.field('Area', 'N', decimal=4)
                sh.field('Index', 'C')
                sh.field('Type', 'C')

                sh.polyz([buffered_coordinates])
                sh.record(Area=value['area'], Index=str(key), Type='Water')
                sh.close()
                shutil.copy(prjfile2, prjfile)

            if value['status'] == 'Island':
                coordinate_list = value['points']
                islandsfolder = AtlassGen.makedir(
                    os.path.join(polygons_folder,
                                 'Islands_shp').replace('\\', '/'))
                file_path = os.path.join(
                    islandsfolder,
                    'Island_poly_{0}.shp'.format(key)).replace('\\', '/')
                prjfile = os.path.join(
                    islandsfolder,
                    'Island_poly_{0}.prj'.format(key)).replace('\\', '/')
                # write the shp. Type 5 is a single simple polygon without z values. Type 15 has z.
                sh = shp.Writer(file_path, shapeType=15)
                sh.field('Area', 'N', decimal=4)
                sh.field('Index', 'C')
                sh.field('Type', 'C')

                sh.polyz([coordinate_list])
                sh.record(Area=value['area'], Index=key, Type='Island')
                sh.close()
                shutil.copy(prjfile2, prjfile)

            if value['status'] == 'IslandLake':
                coordinate_list = value['points']
                buffered_coordinates = []
                # Buffer polygons
                polygon_in = Polygon(coordinate_list)
                try:
                    buffered_poly = Polygon(
                        polygon_in.buffer(0.1).exterior, [r])
                    buffered_coordinates = buffered_poly.exterior.coords
                except:
                    print('could not buffer {0}'.format(key))
                    pass
                finally:
                    if len(buffered_coordinates) == 0:
                        buffered_coordinates = coordinate_list
                file_path = os.path.join(
                    polygons_folder,
                    'IslandLake_poly_{0}.shp'.format(key)).replace('\\', '/')
                prjfile = os.path.join(
                    polygons_folder,
                    'IslandLake_poly_{0}.prj'.format(key)).replace('\\', '/')
                # write the shp. Type 5 is a single simple polygon without z values. Type 15 has z.
                sh = shp.Writer(file_path, shapeType=15)
                sh.field('Area', 'N', decimal=4)
                sh.field('Index', 'C')
                sh.field('Type', 'C')

                sh.polyz([buffered_coordinates])
                sh.record(Area=value['area'], Index=key, Type='Lake')
                sh.close()
                shutil.copy(prjfile2, prjfile)

        inputlazfile = os.path.join(outputfolder, 'inputHydroLaz.laz')
        shutil.copy(lazfile, inputlazfile)

        #remove lakes from islands
        if not len(islandLakes) == 0:
            IslandLakeshpfiles = AtlassGen.FILELIST(['*IslandLake_*.shp'],
                                                    polygons_folder)
            outputfile = os.path.join(outputfolder,
                                      'InputIslandLakes_removed.laz').replace(
                                          '\\', '/')

            for poly in IslandLakeshpfiles:
                print(poly)
                path, polyname, ext = AtlassGen.FILESPEC(poly)
                polylaz = os.path.join(polygons_folder,
                                       '{0}.laz'.format(polyname)).replace(
                                           '\\', '/')

                #makes the indivisual island lake hydro  laz
                resul = AtlassGen.clip(lazfile, polylaz, poly, 'laz')
                print(resul)

                #now clip it out of the main hydro laz before water bodies get individually clipped
                clipIslandsOut(inputlazfile, poly, outputfile)
                shutil.copy(outputfile, inputlazfile)

        shpfiles = AtlassGen.FILELIST(
            ['*Water*.shp'], polygons_folder
        )  # we have already clipped the Islandwater bodies in the earlier stage so no need to clip them again
        print(len(shpfiles))

        clip_task = {}
        clip_task_results = []

        genlazinfo_task = {}
        genlazinfo_task_results = []

        for shpfile in shpfiles:
            path, id, ext = AtlassGen.FILESPEC(shpfile)

            hydrolaz = os.path.join(polygons_folder, '{0}.laz'.format(id))
            #Cut the hydro polygons in to seperate laz files

            clip_task[id] = AtlassTask(id, AtlassGen.clip, inputlazfile,
                                       hydrolaz, shpfile, 'laz')

        p = Pool(processes=cores)
        print('Clipping to polys started')
        clip_task_results = p.map(AtlassTaskRunner.taskmanager,
                                  clip_task.values())

        hydrolazfiles = AtlassGen.FILELIST(['*.laz'], polygons_folder)

        for lfile in hydrolazfiles:
            path, id, ext = AtlassGen.FILESPEC(lfile)

            #Generate lasinfo for each laz file
            genlazinfo_task[id] = AtlassTask(id, genLasinfo, lfile)

        print('Generating Lazinfo for polys started')
        genlazinfo_task_results = p.map(AtlassTaskRunner.taskmanager,
                                        genlazinfo_task.values())

        ############################################################################
        attribs = {}
        attribs['num_points'] = '  number of point records:    '
        attribs['min_xyz'] = '  min x y z:                  '
        attribs['max_xyz'] = '  max x y z:                  '

        txtfiles = AtlassGen.FILELIST(['*.txt'], polygons_folder)

        filedict1 = {}

        for file in txtfiles:
            path, name, extn = AtlassGen.FILESPEC(file)
            lazfile = os.path.join(path,
                                   '{0}.laz'.format(name)).replace('\\', '/')
            filedict1[name] = {}
            filedict1[name]['file'] = file.replace('\\', '/')
            filedict1[name]['lazfile'] = lazfile
            filedict1[name]['attribs'] = {}
            for attrib in attribs.keys():
                filedict1[name]['attribs'][attrib] = ''

        ##############################################################################

        adjusted_laz = AtlassGen.makedir(
            os.path.join(outputfolder,
                         'adjusted_hydro_polygons').replace('\\', '/'))
        #loop through tiles and summarise key attribs
        for name in filedict1.keys():

            print("\nStarting to clamp {0}".format(name))
            lines = [
                line.rstrip('\n') for line in open(filedict1[name]['file'])
            ]

            for line in lines:
                for attrib in attribs.keys():
                    if attribs[attrib] in line:
                        line = line.replace(attribs[attrib], '')
                        line = line.strip(' ')
                        filedict1[name]['attribs'][attrib] = line

            minz = round(
                float(filedict1[name]['attribs']['min_xyz'].split(' ')[2]), 3)
            maxz = round(
                float(filedict1[name]['attribs']['max_xyz'].split(' ')[2]), 3)
            diff = round(maxz - minz, 3)

            print('Polygon {0} minz : {1}'.format(name, minz))

            filedict1[name]['attribs']['minz'] = minz
            filedict1[name]['attribs']['maxz'] = maxz
            filedict1[name]['attribs']['diff'] = diff

            #Move file to a different location if diff is greater than 1m for manual check

            if diff < 1.0:
                if (minz % 0.50) == 0:
                    new_minz = minz - 0.250
                    print('\nMin z of {0} adjusted to {1}'.format(
                        name, new_minz))
                    filedict1[name]['attribs']['minz'] = new_minz
                    minz = new_minz

                lazfile = filedict1[name]['lazfile']
                print('Clamping Polygon : {0}'.format(name))
                outputfile = os.path.join(
                    adjusted_laz, '{0}.laz'.format(name).replace('\\', '/'))
                zadjust(lazfile, outputfile, minz)

            else:
                inputfile = filedict1[name]['lazfile']
                txtf = filedict1[name]['file']
                path, filename, ext = AtlassGen.FILESPEC(inputfile)
                manualCheckdir = AtlassGen.makedir(
                    os.path.join(outputfolder,
                                 'ManualCheck').replace('\\', '/'))
                lazfile = os.path.join(manualCheckdir,
                                       '{0}.laz'.format(filename)).replace(
                                           '\\', '/')
                otxtfile = os.path.join(manualCheckdir,
                                        '{0}.txt'.format(filename)).replace(
                                            '\\', '/')
                filedict1[name]['lazfile'] = lazfile

                movefile(inputfile, lazfile)
                movefile(txtf, otxtfile)

            print(name, filedict1[name]['attribs'])

        attribute_file = os.path.join(outputfolder, 'Poly_Summary.json')
        with open(attribute_file, 'w') as f:
            # Using vars(args) returns the data as a dictionary
            json.dump(filedict1, f)

        #print('{0} polygons deleted due to zero area :\n{1}'.format(len(deleted_polygons),deleted_polygons))

        #Merge the hydro files to one file
        mergedfile = os.path.join(adjusted_laz,
                                  'Merged_Hydro_Output.laz').replace(
                                      '\\', '/')
        if os.path.isfile(mergedfile):
            os.remove(mergedfile)

        AtlassGen.mergeFiles(adjusted_laz, mergedfile, 'laz')

    if args.command == 'Step_3':
        print(
            "Clamping the polygons in Manual Check Folder after visual check.\nNOTE: minz will be used"
        )

        lazpath = args.lazpath
        outputfolder = args.outputfolder

        hydrofolder = AtlassGen.makedir(
            os.path.join(outputfolder, 'Zclamped_hydro_files'))

        ############################################################################
        attribs = {}
        attribs['num_points'] = '  number of point records:    '
        attribs['min_xyz'] = '  min x y z:                  '
        attribs['max_xyz'] = '  max x y z:                  '

        txtfiles = AtlassGen.FILELIST(['*.txt'], lazpath)

        filedict1 = {}

        for file in txtfiles:
            path, name, extn = AtlassGen.FILESPEC(file)
            lazfile = os.path.join(path,
                                   '{0}.laz'.format(name)).replace('\\', '/')
            filedict1[name] = {}
            filedict1[name]['file'] = file.replace('\\', '/')
            filedict1[name]['lazfile'] = lazfile
            filedict1[name]['attribs'] = {}
            for attrib in attribs.keys():
                filedict1[name]['attribs'][attrib] = ''

        ##############################################################################

        #loop through tiles and summarise key attribs
        for name in filedict1.keys():

            lines = [
                line.rstrip('\n') for line in open(filedict1[name]['file'])
            ]

            for line in lines:
                for attrib in attribs.keys():
                    if attribs[attrib] in line:
                        line = line.replace(attribs[attrib], '')
                        line = line.strip(' ')
                        filedict1[name]['attribs'][attrib] = line

            minz = round(
                float(filedict1[name]['attribs']['min_xyz'].split(' ')[2]), 3)
            maxz = round(
                float(filedict1[name]['attribs']['max_xyz'].split(' ')[2]), 3)
            diff = round(maxz - minz, 3)

            print('Polygon {0} minz : {1}'.format(name, minz))

            filedict1[name]['attribs']['minz'] = minz
            filedict1[name]['attribs']['maxz'] = maxz
            filedict1[name]['attribs']['diff'] = diff

            if (minz % 0.50) == 0:
                new_minz = minz - 0.250
                print('\nMin z of {0} adjusted to {1}'.format(name, new_minz))
                filedict1[name]['attribs']['minz'] = new_minz

            lazfile = filedict1[name]['lazfile']
            print('\nClamping Polygon : {0}'.format(name))
            outputfile = os.path.join(
                hydrofolder, '{0}.laz'.format(name).replace('\\', '/'))
            zadjust(lazfile, outputfile, minz)

        print("\nMerging Files\n")
        mergedfile = os.path.join(hydrofolder,
                                  'Merged_Hydro_Output2.laz').replace(
                                      '\\', '/')
        AtlassGen.mergeFiles(hydrofolder, mergedfile, 'laz')

    return
Пример #38
0
class Country:

    def __init__(self, name, abbreviation, rank, zoom, land_area, population, location, position, font):
        self.name = name
        self.abbr = abbreviation
        self.rank = rank
        self.zoom = zoom
        self.area = land_area
        self.population = population
        self.location = location
        self.position = position
        
        self.buffer = 2
        self.use_abbr = False
        
        self._original = deepcopy(position)
        self._label_shape = None
        
        self._minwidth, self._minheight = font.getsize(self.abbr)
        self._maxwidth, self._maxheight = font.getsize(self.name)

        self._update_label_shape()

    def __repr__(self):
        return '<Country: %s>' % self.abbr
    
    def __hash__(self):
        return id(self)

    def __cmp__(self, other):
        return compare_places(self, other)

    def __unicode__(self):
        return unicode(self.use_abbr and self.abbr or self.name)
    
    def _update_label_shape(self):
        """
        """
        x, y = self.position.x, self.position.y
        
        if self.use_abbr:
            width, height = self._minwidth, self._minheight
        else:
            width, height = self._maxwidth, self._maxheight
        
        x1, y1 = x - width/2, y - height/2
        x2, y2 = x + width/2, y + height/2
        
        self._label_shape = Polygon(((x1, y1), (x1, y2), (x2, y2), (x2, y1), (x1, y1)))
    
    def label_bbox(self):
        return self._label_shape.envelope
    
    def mask_shape(self):
        return self._label_shape.buffer(self.buffer).envelope
    
    def move(self):
        self.use_abbr = coin_flip()
    
        width = self.use_abbr and self._minwidth or self._maxwidth
        height = self.use_abbr and self._minheight or self._maxheight
        
        x = (random() - .5) * width
        y = (random() - .5) * height
    
        self.position.x = self._original.x + x
        self.position.y = self._original.y + y
        
        self._update_label_shape()
    
    def placement_energy(self):
        width = self.use_abbr and self._minwidth or self._maxwidth
        
        x = 2 * (self.position.x - self._original.x) / width
        y = 2 * (self.position.y - self._original.y) / width
        
        return int(self.use_abbr) + hypot(x, y) ** 2
    
    def overlap_energy(self, other):
        if self.overlaps(other):
            return min(10.0 / self.rank, 10.0 / other.rank)

        return 0.0
    
    def overlaps(self, other, reflexive=True):
        overlaps = self.mask_shape().intersects(other.label_bbox())
        
        if reflexive:
            overlaps |= other.overlaps(self, False)

        return overlaps

    def in_range(self, other, reflexive=True):
        range = hypot(self._maxwidth + self.buffer*2, self._maxheight + self.buffer*2)
        distance = hypot(self.position.x - other.position.x, self.position.y - other.position.y)
        in_range = distance <= range
        
        if reflexive:
            in_range |= other.in_range(self, False)

        return in_range
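
# A standalone sketch of the label-collision test above (made-up label boxes):
# a label masks a slightly buffered envelope around itself, and two labels
# collide when one's mask intersects the other's bounding box.
from shapely.geometry import Polygon

label_a = Polygon([(0, 0), (40, 0), (40, 12), (0, 12)])
label_b = Polygon([(41, 0), (80, 0), (80, 12), (41, 12)])
buffer = 2
collides = label_a.buffer(buffer).envelope.intersects(label_b.envelope)
print(collides)  # True: the buffer makes the neighbouring labels overlap
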
Пример #39
0
def read_DEM_index(index_file, DEM_MODEL):
    #-- read the compressed shapefile and extract entities
    shape = fiona.open('zip://{0}'.format(os.path.expanduser(index_file)))
    epsg = shape.crs['init']
    #-- extract attribute indice for DEM tile (REMA,GIMP) or name (ArcticDEM)
    if (DEM_MODEL == 'REMA'):
        #-- REMA index file attributes:
        #-- name: DEM mosaic name for tile (file name without suffix)
        #-- tile: DEM tile identifier (IMy_IMx)
        #-- nd_value: fill value for elements with no data
        #-- resolution: DEM horizontal spatial resolution (meters)
        #-- creationda: creation date
        #-- raster: (empty)
        #-- fileurl: link to file on PGC server
        #-- spec_type: specific type (DEM)
        #-- qual: density of scenes within tile (0 to 1)
        #-- reg_src: DEM registration source (ICESat or neighbor align)
        #-- num_gcps: number of ground control points
        #-- meanresz: mean vertical residual (meters)
        #-- active: (1)
        #-- qc: (2)
        #-- rel_ver: release version
        #-- num_comp: number of components
        #-- st_area_sh: tile area (meters^2)
        #-- st_length_: perimeter length of tile (meters)
        field = 'tile'
    elif (DEM_MODEL == 'GIMP'):
        #-- GIMP index file attributes (from make_GIMP_tile_shapefile.py):
        #-- name: DEM mosaic name for tile (file name without suffix)
        #-- tile: DEM tile identifier (IMy_IMx)
        #-- nd_value: fill value for elements with no data
        #-- resolution: DEM horizontal spatial resolution (meters)
        #-- fileurl: link to file on NSIDC server
        #-- spec_type: specific type (DEM)
        #-- reg_src: DEM registration source (ICESat or neighbor align)
        #-- rel_ver: release version
        #-- num_comp: number of components
        #-- st_area_sh: tile area (meters^2)
        #-- st_length_: perimeter length of tile (meters)
        field = 'tile'
    elif (DEM_MODEL == 'ArcticDEM'):
        #-- ArcticDEM index file attributes:
        #-- objectid: DEM tile object identifier for sub-tile
        #-- name: DEM mosaic name for sub-tile (file name without suffix)
        #-- tile: DEM tile identifier (IMy_IMx) (non-unique for sub-tiles)
        #-- nd_value: fill value for elements with no data
        #-- resolution: DEM horizontal spatial resolution (meters)
        #-- creationda: creation date
        #-- raster: (empty)
        #-- fileurl: link to file on PGC server
        #-- spec_type: specific type (DEM)
        #-- qual: density of scenes within tile (0 to 1)
        #-- reg_src: DEM registration source (ICESat or neighbor align)
        #-- num_gcps: number of ground control points
        #-- meanresz: mean vertical residual (meters)
        #-- active: (1)
        #-- qc: (2)
        #-- rel_ver: release version
        #-- num_comp: number of components
        #-- st_area_sh: tile area (meters^2)
        #-- st_length_: perimeter length of tile (meters)
        field = 'name'
    #-- create python dictionary for each polygon object
    poly_dict = {}
    attrs_dict = {}
    #-- extract the entities and assign by tile name
    for i, ent in enumerate(shape.values()):
        #-- tile or name attributes
        if DEM_MODEL in ('REMA', 'GIMP'):
            tile = str(ent['properties'][field])
        else:
            tile, = re.findall(r'^(\d+_\d+_\d+_\d+)', ent['properties'][field])
        #-- extract attributes and assign by tile
        attrs_dict[tile] = {}
        for key, val in ent['properties'].items():
            attrs_dict[tile][key] = val
        #-- upper-left, upper-right, lower-right, lower-left, upper-left
        ul, ur, lr, ll, ul2 = ent['geometry']['coordinates'].pop()
        #-- tile boundaries
        attrs_dict[tile]['xmin'] = ul[0]
        attrs_dict[tile]['xmax'] = lr[0]
        attrs_dict[tile]['ymin'] = lr[1]
        attrs_dict[tile]['ymax'] = ul[1]
        #-- extract Polar Stereographic coordinates for entity
        x = [ul[0], ur[0], lr[0], ll[0], ul2[0]]
        y = [ul[1], ur[1], lr[1], ll[1], ul2[1]]
        poly_obj = Polygon(list(zip(x, y)))
        #-- Valid Polygon may not possess overlapping exterior or interior rings
        if (not poly_obj.is_valid):
            poly_obj = poly_obj.buffer(0)
        poly_dict[tile] = poly_obj
    #-- close the file
    shape.close()
    #-- return the dictionaries of polygon objects and attributes
    return (poly_dict, attrs_dict, epsg)
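# Hypothetical usage of read_DEM_index above: find which DEM tile contains a
# point in Polar Stereographic coordinates (index file path and point are
# placeholders, not values from the original source):
#   from shapely.geometry import Point
#   poly_dict, attrs_dict, epsg = read_DEM_index('~/data/DEM_tile_index.zip', 'ArcticDEM')
#   pt = Point(-2000000.0, 500000.0)
#   hits = [tile for tile, poly in poly_dict.items() if poly.contains(pt)]
#   print(epsg, hits)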
def findTumor_NonTumorRegions(case_id,user):
  #execution_id=user+"_Tumor_Region";
  #execution_id2=user+"_Non_Tumor_Region";
  execution_id="*****@*****.**"
  execution_id2="";
  #print "case_id is:" +str(case_id);
    
  #handle only tumor region overlap
  humanMarkupList_tumor=[];
  tmp_tumor_markup_list0=[];
    
  for humarkup in objects.find({"provenance.image.case_id":case_id,
                                "provenance.analysis.execution_id":execution_id},
                               {"geometry":1,"_id":1}): 
    humarkup_polygon_tmp=humarkup["geometry"]["coordinates"][0]; 
    #print humarkup_polygon_tmp;            
    tmp_polygon=[tuple(i2) for i2 in humarkup_polygon_tmp];
    tmp_polygon2=Polygon(tmp_polygon); 
    tmp_polygon2=tmp_polygon2.convex_hull;       
    tmp_polygon2=tmp_polygon2.buffer(0);                                                  
    tmp_tumor_markup_list0.append(tmp_polygon2);
    #print tmp_tumor_markup_list0;
          
  #handle MultiPolygon
  tmp_tumor_markup_list=handMultiPolygon(tmp_tumor_markup_list0);   
  #print "len(tmp_tumor_markup_list)"+str(len(tmp_tumor_markup_list));
  #return  tmp_tumor_markup_list;
   
  #merge polygons if applicale            
  index_intersected=[];                                
  for index1 in range(0, len(tmp_tumor_markup_list)):  
    if index1 in index_intersected:  #skip polygon which has been merged into another one
      continue;
    humarkup_polygon1=tmp_tumor_markup_list[index1];         
    is_within=False;
    is_intersect=False;
    for index2 in range(0, len(tmp_tumor_markup_list)):  
      humarkup_polygon2=tmp_tumor_markup_list[index2];
      if (index1 != index2):
        if (humarkup_polygon1.within(humarkup_polygon2)):    
          is_within=True;            
          break;              
        if (humarkup_polygon1.intersects(humarkup_polygon2)):
          humarkup_polygon1=humarkup_polygon1.union(humarkup_polygon2);           
          is_intersect=True;
          index_intersected.append(index2);                
    if(not is_within and not is_intersect):
      humanMarkupList_tumor.append(humarkup_polygon1);          
    if(is_within):
      continue;         
    if(is_intersect):          
      humanMarkupList_tumor.append(humarkup_polygon1); 
  print "len(humanMarkupList_tumor) is " + str(len(humanMarkupList_tumor));
            
  #handle only non tumor region overlap
  humanMarkupList_non_tumor=[];
  tmp_non_tumor_markup_list0=[];
  for humarkup in objects.find({"provenance.image.case_id":case_id,
                                "provenance.analysis.execution_id":execution_id2},
                               {"geometry":1,"_id":0}):
    humarkup_polygon_tmp=humarkup["geometry"]["coordinates"][0];             
    tmp_polygon=[tuple(i2) for i2 in humarkup_polygon_tmp];
    tmp_polygon2=Polygon(tmp_polygon); 
    tmp_polygon2=tmp_polygon2.convex_hull;       
    tmp_polygon2=tmp_polygon2.buffer(0);
    tmp_non_tumor_markup_list0.append(tmp_polygon2);   
      
  #handle MultiPolygon
  tmp_non_tumor_markup_list=handMultiPolygon(tmp_non_tumor_markup_list0);      
  index_intersected=[];                                
  for index1 in range(0, len(tmp_non_tumor_markup_list)):  
    if index1 in index_intersected:  #skip polygon which has been merged into another one
      continue;
    humarkup_polygon1=tmp_non_tumor_markup_list[index1]; 
    is_within=False;
    is_intersect=False;
    for index2 in range(0, len(tmp_non_tumor_markup_list)):  
      humarkup_polygon2=tmp_non_tumor_markup_list[index2];
      if (index1 != index2):
        if (humarkup_polygon1.within(humarkup_polygon2)):    
          is_within=True;            
          break;              
        if (humarkup_polygon1.intersects(humarkup_polygon2)):
          humarkup_polygon1=humarkup_polygon1.union(humarkup_polygon2);          
          is_intersect=True;
          index_intersected.append(index2);                
    if(not is_within and not is_intersect):
      humanMarkupList_non_tumor.append(humarkup_polygon1);          
    if(is_within):
      continue;         
    if(is_intersect):          
      humanMarkupList_non_tumor.append(humarkup_polygon1); 
           
  #handle tumor and non tumor region cross overlap
  for index1,tumor_region in enumerate(humanMarkupList_tumor):    
    for index2,non_tumor_region in enumerate(humanMarkupList_non_tumor):      
      if (tumor_region.within(non_tumor_region)): 
        ext_polygon_intersect_points =list(zip(*non_tumor_region.exterior.coords.xy));   
        int_polygon_intersect_points =list(zip(*tumor_region.exterior.coords.xy)); 
        newPoly = Polygon(ext_polygon_intersect_points,[int_polygon_intersect_points]);
        humanMarkupList_non_tumor[index2]=newPoly;#add a hole to this polygon
      elif (non_tumor_region.within(tumor_region)): 
        ext_polygon_intersect_points =list(zip(*tumor_region.exterior.coords.xy));   
        int_polygon_intersect_points =list(zip(*non_tumor_region.exterior.coords.xy)); 
        newPoly = Polygon(ext_polygon_intersect_points,[int_polygon_intersect_points]);
        humanMarkupList_tumor[index1]=newPoly;#add a hole to this polygon   
  
  return  humanMarkupList_tumor,humanMarkupList_non_tumor;     
Пример #41
0
def filter_pot_bergs(poss_bergs, onedem):
    """
    Test each potential iceberg for validity, and if valid compute the sea level adjustment and
    get elevation pixel values for putting into the geodataframe.

    Parameters
    ----------
    poss_bergs : list of potential iceberg geometries
    """

    bergs = []
    elevs = []
    sl_adjs = []

    fjord = onedem.attrs['fjord']
    max_freebd = fjord_props.get_ice_thickness(fjord) / 10.0
    minfree = fjord_props.get_min_freeboard(fjord)
    res = onedem.attrs['res'][0]  # Note: the pixel area will be inaccurate if the resolution is not the same in x and y

    # 10 pixel buffer
    buffer = 10 * res

    for berg in poss_bergs:
        # make a valid shapely Polygon of the berg vertices
        # print(berg)
        origberg = Polygon(berg)
        # print('got a new iceberg')

        if not origberg.is_valid or origberg.is_empty:
            # print("invalid or empty berg geometry")
            continue

        # create a negatively buffered berg outline to exclude border/water pixels
        berg = origberg.buffer(-buffer)
        if not berg.is_valid or berg.is_empty:
            # print("invalid buffered inner-berg geometry")
            continue

        # get the largest polygon from a multipolygon (if one was created during buffering)
        if berg.geom_type == 'MultiPolygon':
            subbergs = list(berg.geoms)
            area = []
            for sb in subbergs:
                sb = Polygon(sb)
                area.append(sb.area)
            # print(area)
            idx = np.where(area == np.nanmax(area))[0]
            berg = Polygon(subbergs[idx[0]])
            # print('tried to trim down a multipolygon')

        if not berg.is_valid:
            # print("invalid buffered multipolygon extraction")
            continue

        # remove holes
        if berg.interiors:
            berg = Polygon(list(berg.exterior.coords))
            # print('removed some holes')

        if not berg.is_valid:
            # print("invalid buffered interiors geometry")
            continue

        # get the polygon complexity and skip if it's above the threshold
        complexity = vector_ops.poly_complexity(berg)
        if complexity >= 0.07:
            # print('border too complex. Removing...')
            continue

        # get the subset (based on a buffered bounding box) of the DEM that contains the iceberg
        # bounds: (minx, miny, maxx, maxy)
        bound_box = origberg.bounds
        try:
            berg_dem = onedem['elevation'].rio.slice_xy(*bound_box)
        except NoDataInBounds:
            coords = ('x', 'y', 'x', 'y')
            exbound_box = []
            for a, b in zip(bound_box, coords):
                exbound_box.append(getexval(onedem[b], b, a))
            berg_dem = onedem['elevation'].rio.slice_xy(*exbound_box)
            if np.all(np.isnan(berg_dem.values)):
                print("all nan area - no actual berg")
                continue

        # berg_dem = onedem['elevation'].sel(x=slice(bound_box[0]-buffer, bound_box[2]+buffer),
        #                                 # y=slice(bound_box[3]+buffer, bound_box[1]-buffer)) # pangeo? May have been because of issues with applying transform to right-side-up image above?
        # y=slice(bound_box[1]-buffer, bound_box[3]+buffer)) # my comp

        # print(bound_box)
        # print(np.shape(berg_dem))
        # print(berg_dem)
        # print(berg_dem.elevation.values)

        # extract the iceberg elevation values
        # Note: rioxarray does not carry crs info from the dataset to individual variables
        # print(berg)
        # print(len(bergs))
        # print(berg.area)
        try:
            vals = berg_dem.rio.clip([berg],
                                     crs=onedem.attrs['crs']).values.flatten()
        except NoDataInBounds:
            if berg.area < (res**2.0) * 10.0:
                continue
            vals = berg_dem.rio.clip([berg], crs=onedem.attrs['crs'], all_touched=True).values.flatten()

        # remove nans because causing all kinds of issues down the processing pipeline (returning nan as a result and converting entire array to nan)
        vals = vals[~np.isnan(vals)]
        # print(vals)

        # skip bergs that likely contain a lot of cloud (or otherwise unrealistic elevation) pixels
        if np.nanmedian(
                vals
        ) > max_freebd:  # units in meters, matching those of the DEM elevation
            # print('"iceberg" too tall. Removing...')
            continue

        # get the pixel values for the original berg extent and a buffered version for the sea level adjustment
        orig_vals = berg_dem.rio.clip(
            [origberg], crs=onedem.attrs['crs']).values.flatten()
        orig_vals = orig_vals[~np.isnan(orig_vals)]

        slberg = origberg.buffer(
            buffer)  # get geometry bordering iceberg for sea level adjustment
        # get the regional elevation values and use to determine the sea level adjustment
        slvals = berg_dem.rio.clip([slberg],
                                   crs=onedem.attrs['crs']).values.flatten()
        slvals = slvals[~np.isnan(slvals)]

        sea = [val for val in slvals if val not in orig_vals]
        # NOTE: sea level adjustment (m) is relative to tidal height at the time of image acquisition, not 0 msl
        sl_adj = np.nanmedian(sea)
        # print(sl_adj)

        # check that the median freeboard elevation (pre-filtering) is at least x m above sea level
        if abs(np.nanmedian(vals) - sl_adj) < minfree:
            # print(np.nanmedian(vals))
            # print(sl_adj)
            print('median iceberg freeboard less than ' + str(minfree) + ' m')
            continue

        # apply the sea level adjustment to the elevation values
        vals = icalcs.apply_decrease_offset(vals, sl_adj)

        bergs.append(berg)
        elevs.append(vals)
        sl_adjs.append(sl_adj)

    print(len(bergs))

    return bergs, elevs, sl_adjs
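
A minimal, self-contained sketch of the sea-level-adjustment step above, with synthetic elevation samples standing in for the rioxarray clips (np.setdiff1d replaces the list-membership filter used in the snippet):

import numpy as np

berg_vals = np.array([12.1, 11.8, 12.5, np.nan, 11.9])    # pixels inside the berg outline
ring_vals = np.array([0.4, 0.3, np.nan, 0.5, 12.1, 0.2])  # pixels inside the buffered outline

berg_vals = berg_vals[~np.isnan(berg_vals)]
ring_vals = ring_vals[~np.isnan(ring_vals)]

# keep only buffered-outline pixels that are not also berg pixels, then take the median as "sea"
sea = np.setdiff1d(ring_vals, berg_vals)
sl_adj = np.nanmedian(sea)

freeboard = berg_vals - sl_adj   # elevations corrected to the local sea level
print(sl_adj, np.nanmedian(freeboard))
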
Пример #42
0
        # give up
        break
    holes = unassigned
print >> sys.stderr, "%d retried, %d unassigned." % (retries, len(unassigned))

print >> sys.stderr, "Buffering polygons."
for place_id, polygon in polygons.items():
    if type(polygon) is Polygon:
        polygon = Polygon(polygon.exterior.coords)
    else:
        bits = []
        for p in polygon.geoms:
            if type(p) is Polygon:
                bits.append(Polygon(p.exterior.coords))
        polygon = MultiPolygon(bits)
    polygons[place_id] = polygon.buffer(0)

print >> sys.stderr, "Writing output."
features = []
for place_id, poly in polygons.items():
    features.append({
        "type": "Feature",
        "id": place_id,
        "geometry": poly.__geo_interface__,
        "properties": {
            "woe_id": place_id,
            "name": names.get(place_id, "")
        }
    })

collection = {"type": "FeatureCollection", "features": features}
Пример #43
0
    def evaluate_image(self, gt, pred):
        def get_union(pD, pG):
            return Polygon(pD).union(Polygon(pG)).area

        def get_intersection_over_union(pD, pG):
            return get_intersection(pD, pG) / get_union(pD, pG)

        def get_intersection(pD, pG):
            return Polygon(pD).intersection(Polygon(pG)).area

        matchedSum = 0

        numGlobalCareGt = 0
        numGlobalCareDet = 0

        detMatched = 0

        iouMat = np.empty([1, 1])

        gtPols = []
        detPols = []

        gtPolPoints = []
        detPolPoints = []

        # Array of Ground Truth Polygons' keys marked as don't Care
        gtDontCarePolsNum = []
        # Array of Detected Polygons' matched with a don't Care GT
        detDontCarePolsNum = []

        pairs = []
        detMatchedNums = []

        evaluationLog = ""

        for n in range(len(gt)):
            points = gt[n]['points']
            dontCare = gt[n]['ignore']
            points = Polygon(points)
            points = points.buffer(0)
            if not Polygon(points).is_valid or not Polygon(points).is_simple:
                continue

            gtPol = points
            gtPols.append(gtPol)
            gtPolPoints.append(points)
            if dontCare:
                gtDontCarePolsNum.append(len(gtPols) - 1)

        evaluationLog += "GT polygons: " + str(len(gtPols)) + (
            " (" + str(len(gtDontCarePolsNum)) +
            " don't care)\n" if len(gtDontCarePolsNum) > 0 else "\n")

        for n in range(len(pred)):
            points = pred[n]['points']
            points = Polygon(points)
            points = points.buffer(0)
            if not Polygon(points).is_valid or not Polygon(points).is_simple:
                continue

            detPol = points
            detPols.append(detPol)
            detPolPoints.append(points)
            if len(gtDontCarePolsNum) > 0:
                for dontCarePol in gtDontCarePolsNum:
                    dontCarePol = gtPols[dontCarePol]
                    intersected_area = get_intersection(dontCarePol, detPol)
                    pdDimensions = Polygon(detPol).area
                    precision = 0 if pdDimensions == 0 else intersected_area / pdDimensions
                    if precision > self.area_precision_constraint:
                        detDontCarePolsNum.append(len(detPols) - 1)
                        break

        evaluationLog += "DET polygons: " + str(len(detPols)) + (
            " (" + str(len(detDontCarePolsNum)) +
            " don't care)\n" if len(detDontCarePolsNum) > 0 else "\n")

        if len(gtPols) > 0 and len(detPols) > 0:
            # Calculate IoU and precision matrices
            outputShape = [len(gtPols), len(detPols)]
            iouMat = np.empty(outputShape)
            gtRectMat = np.zeros(len(gtPols), np.int8)
            detRectMat = np.zeros(len(detPols), np.int8)
            for gtNum in range(len(gtPols)):
                for detNum in range(len(detPols)):
                    pG = gtPols[gtNum]
                    pD = detPols[detNum]
                    iouMat[gtNum, detNum] = get_intersection_over_union(pD, pG)

            for gtNum in range(len(gtPols)):
                for detNum in range(len(detPols)):
                    if gtRectMat[gtNum] == 0 and detRectMat[detNum] == 0 and \
                            gtNum not in gtDontCarePolsNum and \
                            detNum not in detDontCarePolsNum:
                        if iouMat[gtNum, detNum] > self.iou_constraint:
                            gtRectMat[gtNum] = 1
                            detRectMat[detNum] = 1
                            detMatched += 1
                            pairs.append({'gt': gtNum, 'det': detNum})
                            detMatchedNums.append(detNum)
                            evaluationLog += "Match GT #" + \
                                str(gtNum) + " with Det #" + str(detNum) + "\n"

        numGtCare = (len(gtPols) - len(gtDontCarePolsNum))
        numDetCare = (len(detPols) - len(detDontCarePolsNum))
        if numGtCare == 0:
            recall = float(1)
            precision = float(0) if numDetCare > 0 else float(1)
        else:
            recall = float(detMatched) / numGtCare
            precision = 0 if numDetCare == 0 else float(
                detMatched) / numDetCare

        hmean = 0 if (precision + recall) == 0 else 2.0 * \
            precision * recall / (precision + recall)

        matchedSum += detMatched
        numGlobalCareGt += numGtCare
        numGlobalCareDet += numDetCare

        perSampleMetrics = {
            'precision': precision,
            'recall': recall,
            'hmean': hmean,
            'pairs': pairs,
            'iouMat': [] if len(detPols) > 100 else iouMat.tolist(),
            'gtPolPoints': gtPolPoints,
            'detPolPoints': detPolPoints,
            'gtCare': numGtCare,
            'detCare': numDetCare,
            'gtDontCare': gtDontCarePolsNum,
            'detDontCare': detDontCarePolsNum,
            'detMatched': detMatched,
            'evaluationLog': evaluationLog
        }

        return perSampleMetrics
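
A minimal sketch of the IoU computation that the matching loop above relies on, using two made-up boxes:

from shapely.geometry import Polygon

gt  = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)])
det = Polygon([(2, 2), (6, 2), (6, 6), (2, 6)])

inter = gt.intersection(det).area      # 4.0
union = gt.union(det).area             # 28.0
iou = inter / union if union > 0 else 0.0
print(round(iou, 4))                   # 0.1429; a match would require iou > iou_constraint
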
Пример #44
0
    def testMinimumCircleSetCoverForLineSetGreedyRandom(self):
        print("testMinimumCircleSetCoverForLineSetGreedyRandom")
        line_endpoints = []
        line_segments = []
        random.seed(0)
        start = [100, 100]
        end = [200, 120]

        p1 = start[0]
        nsteps = 200
        for i in range(1, nsteps):
            p2 = random.randint(start[1], end[1])
            line_endpoints.append((p1, p2))
            p1 = start[0] + float(end[0] - start[0]) / float(nsteps) * i

        lineString = LineString(line_endpoints)
        centers = []
        x_coords = [random.randint(100, 200) for i in range(1, 150)]
        sorted_xcoords = sorted(x_coords)

        for p1 in sorted_xcoords:
            p2 = random.randint(90, 100)
            centers.append((p1, p2))

        for i in range(len(line_endpoints) - 1):
            line_segments.append(
                line.Line(line_endpoints[i], line_endpoints[i + 1]))
        circ, included = circlecover.min_line_cover_greedy(
            centers, line_endpoints, min_center_distance=20)
        testName = "LineCoverGreedyRandom"
        bp = excessarea.generate_bounding_polygon(line_endpoints, centers)
        printcover.printCover(bp, circ, centers, 20, included, testName,
                              VAR_RADIUS)

        for point in line_endpoints:
            flag = False
            for c in circ:
                if c.inside(point):
                    flag = True
                    break
            self.assertTrue(flag)

        u = Polygon()

        for c in circ:
            u = u.union(c.get_geometry())

        self.assertTrue(u.buffer(1).contains(lineString))

        # Check if the segments in the cover are distinct subsets.
        for i in range(0, len(included)):
            for k in range(i + 1, len(included) - 1):
                for l in included[i]:
                    for m in included[k]:
                        self.assertFalse(l == m)

        circ, included = circlecover.min_point_cover_greedy_with_fixed_discs(
            centers, line_endpoints, min_center_distance=20)
        for point in line_endpoints:
            flag = False
            for c in circ:
                if c.inside(point):
                    flag = True
                    break
            self.assertTrue(flag)
        bp = excessarea.generate_bounding_polygon(line_endpoints, centers)
        printcover.printCover(bp, circ, centers, 20, [], testName,
                              FIXED_RADIUS)
        with concurrent.futures.ProcessPoolExecutor(
                max_workers=max_workers) as executor:
            for index, tile_min_point in enumerate(unique_tile_min_point_list):
                tile_width, tile_height = getTileMetaData(
                    tile_min_point, local_img_folder, prefix_list)
                tile_minx = tile_min_point[0]
                tile_miny = tile_min_point[1]
                x1t = float(tile_minx) / float(image_width)
                y1t = float(tile_miny) / float(image_height)
                x2t = float(tile_minx + tile_width) / float(image_width)
                y2t = float(tile_miny + tile_height) / float(image_height)
                tile_polygon_0 = [[x1t, y1t], [x2t, y1t], [x2t, y2t],
                                  [x1t, y2t], [x1t, y1t]]
                tmp_poly = [tuple(i1) for i1 in tile_polygon_0]
                tmp_polygon = Polygon(tmp_poly)
                tile_polygon = tmp_polygon.buffer(0)
                tileHumanMarkupRelation_tumor = "disjoin"

                for humanMarkup in humanMarkupList_tumor:
                    if (tile_polygon.within(humanMarkup)):
                        tileHumanMarkupRelation_tumor = "within"
                        tumor_related_tile = True
                        break
                    elif (tile_polygon.intersects(humanMarkup)):
                        tileHumanMarkupRelation_tumor = "intersect"
                        tile_humanmarkup_intersect_polygon_tumor = humanMarkup
                        tumor_related_tile = True
                        tile_polygon = tile_polygon.intersection(humanMarkup)
                        break
                    else:
                        tileHumanMarkupRelation_tumor = "disjoin"
Пример #46
0
class Navigation(object):
    """Common navigation machinery used by different modules.
    
    Stores boat position (both lat/long and x/y based on UTM projection), and
    heading, along with apparent wind angle.
    """
    def __init__(self,
                 beating_angle=45,
                 utm_zone=30,
                 jibe_to_turn=False,
                 safety_zone_ll=None,
                 safety_zone_margin=5):
        """
        beating_angle : Closest absolute angle relative to the wind that we can
            sail
        utm_zone : Zone number of the UTM system to use. Southampton is in
            zone 30, Portugal in zone 29. http://www.dmap.co.uk/utmworld.htm
            Distance calculations will be less accurate the further from the
            specified zone you are.
        jibe_to_turn : True to turn by jibing instead of tacking.
        safety_zone_ll : A series of lat/lon points we should stay within.
        safety_zone_margin : The safety buffer (in metres) to stay inside
            the bounding box.
        """
        self.projection = Proj(proj='utm', zone=utm_zone, ellps='WGS84')
        self.position_ll = ll = LatLon(50.8, 1.02)
        x, y = self.latlon_to_utm(ll.lat.decimal_degree, ll.lon.decimal_degree)
        self.position_xy = Point(x, y)
        self.heading = 0.
        self.wind_direction = 0.
        self.beating_angle = beating_angle
        self.jibe_to_turn = jibe_to_turn
        self.safety_zone_ll = safety_zone_ll
        self.safety_zone_margin = safety_zone_margin
        if safety_zone_ll:
            self.safety_zone = Polygon(
                [self.latlon_to_utm(*p) for p in safety_zone_ll])
            self.safety_zone_inner = self.safety_zone.buffer(
                -safety_zone_margin)
        else:
            self.safety_zone = self.safety_zone_inner = None

    def update_position(self, msg):
        self.position_ll = LatLon(msg.latitude, msg.longitude)
        x, y = self.latlon_to_utm(msg.latitude, msg.longitude)
        self.position_xy = Point(x, y)

    def latlon_to_utm(self, lat, lon):
        """Returns (x, y) coordinates in metres"""
        return self.projection(lon, lat)

    def utm_to_latlon(self, x, y):
        """Returns a LatLon object"""
        lon, lat = self.projection(x, y, inverse=True)
        return LatLon(lat, lon)

    def update_heading(self, msg):
        self.heading = msg.data

    def update_wind_direction(self, msg):
        self.wind_direction = msg.data

    def absolute_wind_direction(self):
        """Convert apparent wind direction to absolute wind direction"""
        # This assumes that our speed is negligible relative to wind speed.
        return angleSum(self.heading, self.wind_direction)

    def angle_to_wind(self):
        """Calculate angle relative to wind (-180 to 180)

        Angle relative to wind is reversed from wind direction: if the wind is
        coming from 90, the angle relative to the wind is -90.
        """
        wd = self.wind_direction
        if wd > 180:
            wd -= 360
        return -wd

    def heading_to_wind_angle(self, heading):
        """Convert a compass heading (0-360) to an angle relative to the wind (+-180)
        """
        return angle_subtract(heading, self.absolute_wind_direction())

    def wind_angle_to_heading(self, wind_angle):
        """Convert angle relative to the wind (+-180) to a compass heading (0-360).
        """
        return angleSum(self.absolute_wind_direction(), wind_angle)

    def check_safety_zone(self):
        """Check if the boat is within the safety zone.

        0 : Comfortably inside the safety zone (or no safety zone specified)
        1 : Inside the safety zone, but in the margin
        2 : Outside the safety zone
        """
        if self.safety_zone is None:
            return 0

        if self.position_xy.within(self.safety_zone_inner):
            return 0
        if self.position_xy.within(self.safety_zone):
            return 1
        return 2

    def distance_and_heading(self, wp):
        """Calculate the distance and heading from current position to wp.

        wp should both be a shapely.geometry.Point object
        """
        dx = wp.x - self.position_xy.x
        dy = wp.y - self.position_xy.y
        d = (dx**2 + dy**2)**0.5
        h = math.degrees(math.atan2(dx, dy)) % 360
        return d, h
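
A standalone sketch of the safety-zone grading used by check_safety_zone above, with made-up UTM-like coordinates:

from shapely.geometry import Point, Polygon

safety_zone = Polygon([(0, 0), (100, 0), (100, 100), (0, 100)])
safety_zone_inner = safety_zone.buffer(-5)   # 5 m margin, as with safety_zone_margin

def zone_status(p):
    if p.within(safety_zone_inner):
        return 0   # comfortably inside
    if p.within(safety_zone):
        return 1   # inside, but within the margin
    return 2       # outside

print(zone_status(Point(50, 50)), zone_status(Point(2, 50)), zone_status(Point(120, 50)))
# -> 0 1 2
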
Пример #47
0
    holes = unassigned
print >>sys.stderr, "%d retried, %d unassigned." % (retries, len(unassigned))

hoodIndex = Rtree()

print >>sys.stderr, "Buffering polygons."
for place_id, polygon in polygons.items():
    if type(polygon) is Polygon:
        polygon = Polygon(polygon.exterior.coords)
    else:
        bits = []
        for p in polygon.geoms:
            if type(p) is Polygon:
                bits.append(Polygon(p.exterior.coords))
        polygon = MultiPolygon(bits)
    polygons[place_id] = polygon.buffer(0)
    hoodIndex.insert(place_id, polygons[place_id].bounds)

print >>sys.stderr, "Retconning blocks to shapes."
cur.execute("""select geom, geoid10 FROM tabblock10 tb WHERE statefp10 = %s AND countyfp10 = %s AND blockce10 NOT LIKE '0%%'""", (statefp10, countyfp10))
for r in cur.fetchall():
  poly = wkb.loads(r[0], hex=True)
  id = r[1]
  candidates = [i for i in hoodIndex.intersection(poly.bounds)]
  found = False
  for place_id in candidates:
    hood = polygons[place_id]
    if hood.contains(poly):
      cur.execute("""DELETE FROM votes WHERE source=%s AND id=%s""", ('blockr', id))
      cur.execute("""INSERT INTO votes (id, label, count, source) values (%s, %s, %s, 'blockr')""", (
          id, place_id, 1))
Пример #48
0
def ExtractCrackNets4(image_size_wh,
                      in_list: List[list],
                      mask_resolution:int=160,
                      loop_devide_eps:int=2,
                      min_area_in_bbox_coef:float=0.3,
                      min_poly_bbox_area_coef:float=0.008,
                      enable_subclustering:bool=True,
                      min_points_in_cluster:int=10
                      ) -> List[list]:
    """
    Takes a set of lines and generates crack-network polygons from them.
    How it works:
        0) Extract the points from all lines and discard the lines themselves
        1) Convert the point coordinates into relative units (mask_resolution)
        2) Cluster the points if the option is enabled (enable_subclustering); otherwise all points go into one cluster
        3) For each cluster, create a reduced-resolution mask and draw the points on it
        4) Post-process the masks (remove holes, etc.)
        5) Find the contours - these are our new second-order clusters
        5.1) Remove loops and turn them into separate clusters
        6) Filter the second-order clusters by relative areas:
            6.1) [cluster area]/[bounding rectangle area] < (min_area_in_bbox_coef)
            6.2) [cluster area]/[total area] < (parameter: min_poly_bbox_area_coef)
        7) Convert the coordinates back to absolute scale and store each cluster as a polygon

    :param image_size_wh: size of the source image (width, height)
    :param in_list: list of simple lines (a line is a list of points)
    :param mask_resolution: resolution of the mask (mask coarseness) - preferably in [80-500]
    :param loop_devide_eps: relative distance between two polygon points used to detect a loop
    :param min_area_in_bbox_coef: minimum fill ratio of a cluster within its bounding rectangle
    :param min_poly_bbox_area_coef: minimum ratio of the bbox area to the total area
    :param enable_subclustering: use preliminary clustering
    :param min_points_in_cluster: minimum number of points in a cluster (for the preliminary clustering)
    :return: list of polygons
    """

    if len(in_list) == 0:
        return []

    points = []
    for line in in_list:
        points.extend(line)

    simple_format_points = np.array(points)

    image_width = image_size_wh[0]
    image_height = image_size_wh[1]

    target_width = mask_resolution
    target_height = int(target_width * (image_height / image_width))
    target_area = float(target_width) * target_height

    width_coef = target_width / float(image_width)
    heigh_coef = target_height / float(image_height)

    simple_format_points[:, 0] = simple_format_points[:, 0] * width_coef
    simple_format_points[:, 1] = simple_format_points[:, 1] * heigh_coef
    simple_format_points = np.array(simple_format_points, np.uint8)
    res = []

    integer_points = simple_format_points.tolist()

    # Do we need clustering?
    if enable_subclustering and len(simple_format_points) >= min_points_in_cluster:
        import hdbscan
        hdb = hdbscan.HDBSCAN(min_cluster_size=min_points_in_cluster).fit(simple_format_points)
        hdb_labels = hdb.labels_
        points_group = combine_clusters(labels=hdb_labels, X=integer_points)
    else:
        points_group = [integer_points]

    masks = []

    for simple_format_points in points_group:
        mask_image = np.zeros((target_height, target_width), np.uint8)

        for point in simple_format_points:
            point_drawing_size = 1
            cv2.circle(mask_image, (point[0], point[1]), point_drawing_size, (255, 255, 255), -1)

        # im_num = random.randint(1000, 9999)
        # cv2.imwrite("/sly_task_data/tmp/qqq_{}.png".format(im_num), mask_image)

        kernel = np.ones((5, 5), np.uint8)

        # mask_image = cv2.erode(mask_image, kernel, iterations=1)
        # mask_image = cv2.dilate(mask_image, kernel, iterations=1)

        mask_image = cv2.morphologyEx(mask_image, cv2.MORPH_CLOSE, kernel)

        # cv2.imwrite("/sly_task_data/tmp/qqq_{}p.png".format(im_num), mask_image)

        masks.append(mask_image)



    for mask_image in masks:
        contours, hierarchy = cv2.findContours(mask_image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        clusters = []
        for contour in contours:
            contour = np.squeeze(contour, axis=1)
            clusters.extend(remove_poly_loops(contour, eps=loop_devide_eps))

        # for i in range(len(clusters)):
        #    clusters[i] = remove_intersections_in_poly(clusters[i])

        for cluster in clusters:
            cluster = np.array(cluster)

            width = np.max(cluster[:, 0]) - np.min(cluster[:, 0])
            height = np.max(cluster[:, 1]) - np.min(cluster[:, 1])

            max_size_ratio_check = 8
            if width == 0 or height == 0 or width / height > max_size_ratio_check or height / width > max_size_ratio_check:
                continue

            poly_area = get_simple_poly_area(cluster.tolist())
            bbox_area = width * height

            # Filter by fill ratio of the bounding box
            if poly_area / bbox_area < min_area_in_bbox_coef:
                continue

            # Filter by bbox size relative to the whole image
            if bbox_area / target_area < min_poly_bbox_area_coef:
                continue

            cluster = np.array(cluster, dtype=float)
            cluster[:, 0] = cluster[:, 0] / width_coef
            cluster[:, 1] = cluster[:, 1] / heigh_coef
            cluster = cluster.astype(int)

            c_exterior = cluster.tolist()
            poly = Polygon(shell=c_exterior)

            if poly.is_valid == False:
                poly = poly.buffer(0)

            if poly.geom_type == 'MultiPolygon':
                for p in poly.geoms:
                    cluster = np.transpose(p.exterior.coords.xy).tolist()
                    res.append(cluster)
            else:
                cluster = np.transpose(poly.exterior.coords.xy).tolist()
                res.append(cluster)
    return res
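
A small sketch of the invalid-polygon repair used near the end of the loop above: buffer(0) fixes a self-intersecting shell and may return a MultiPolygon whose parts are read back through .geoms:

from shapely.geometry import Polygon

bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])   # self-intersecting shell
print(bowtie.is_valid)                                # False

fixed = bowtie.buffer(0)
parts = list(fixed.geoms) if fixed.geom_type == 'MultiPolygon' else [fixed]
for p in parts:
    print(p.geom_type, p.is_valid, list(p.exterior.coords))
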
Пример #49
0
def filter_planes_and_holes(polygons, points, config_pp, rm=None):
    """Extracts the plane and obstacles returned from polylidar.
    This function performs post-processing of the Polygons returned by Polylidar3D using the Shapely library.
    If the polygons are 3D you must provide a scipy rotation matrix such that the polygon
    align with they XY plane (Shapely can only handle 2D polygons with XY coordinates).
    The basic steps are:
        * Simplification of Polygon by config_pp['simplify']
        * Positive buffer of Polygon by config_pp['positive_buffer']
        * Negative buffer of Polygon by config_pp['negative_buffer']
        * Simplification of Polygon by config_pp['simplify']
        * Remove polygons whose area is less or greater than data in config_pp['filter']['plane_area']
        * Remove holes whose vertices are less than data in config_pp['filter']['hole_vertices']
        * Remove holes whose area is less or greater than data in config_pp['filter']['hole_area']
    It then returns the Shapely polygons of the planes and the holes (obstacles)
    An example config_pp
    .. code-block:: python
        {
            positive_buffer: 0.005 # m, Positively expand polygon.  Fills in small holes
            negative_buffer: 0.03 # m, Negative buffer to polygon. Expands holes and constricts outer hull of polygon
            simplify: 0.02  # m, simplify edges of polygon
            filter: # obstacles must have these characteristics
                hole_area:
                    min: 0.025   # m^2
                    max: 0.785 # m^2
                hole_vertices:
                    min: 6
                plane_area:
                    min: .5 # m^2
        }
    Arguments:
        polygons {list[Polygons]} -- A list of polygons returned from polylidar
        points {ndarray} -- MX3 array
        config_pp {dict} -- Configuration for post processing filtering
        rm {scipy.spatial.RotationMatrix} -- Rotation matrix applied to 3D polygons to make 2D
    Returns:
        tuple -- A list of plane shapely polygons and a list of holes in polygons
    """
    # filtering configuration
    post_filter = config_pp['filter']

    # will hold the plane(s) and obstacles found
    planes = []
    obstacles = []
    planes_indices = []
    # print("Polylidar returned {} polygons, ".format(len(polygons)))
    for poly_index, poly in enumerate(polygons):
        t0 = time.perf_counter()
        if rm is not None:
            shell_coords = rm.apply(get_points(poly.shell, points))
            hole_coords = [rm.apply(get_points(hole, points)) for hole in poly.holes]
        else:
            shell_coords = get_points(poly.shell, points)
            hole_coords = [get_points(hole, points) for hole in poly.holes]
        t1 = time.perf_counter()
        poly_shape = Polygon(shell=shell_coords, holes=hole_coords)
        t2 = time.perf_counter()
        # print(poly_shape.is_valid)
        # fig, ax = plt.subplots(figsize=(10, 10), nrows=1, ncols=1)
        # plot_poly(poly_shape, ax, poly)
        # plt.axis('equal')
        # plt.show()
        # print(poly_shape.is_valid)
        # assert poly_shape.is_valid
        area = poly_shape.area
        # logging.info("Got a plane!")
        if post_filter['plane_area']['min'] and area < post_filter['plane_area']['min']:
            # logging.info("Skipping Plane")
            continue
        z_value = shell_coords[0][2]

        t3 = time.perf_counter()
        if config_pp['simplify']:
            poly_shape = poly_shape.simplify(
                tolerance=config_pp['simplify'], preserve_topology=True)
        t4 = time.perf_counter()
        # Perform 2D geometric operations
        if config_pp['positive_buffer']:
            poly_shape = poly_shape.buffer(
                config_pp['positive_buffer'], join_style=JOIN_STYLE.mitre, resolution=4)
        t5 = time.perf_counter()
        if config_pp['negative_buffer']:
            poly_shape = poly_shape.buffer(
                distance=-config_pp['negative_buffer'], join_style=JOIN_STYLE.mitre, resolution=4)
            # if poly_shape.geom_type == 'MultiPolygon':
            #     all_poly_shapes = list(poly_shape.geoms)
            #     poly_shape = sorted(
            #         all_poly_shapes, key=lambda geom: geom.area, reverse=True)[0]
        t6 = time.perf_counter()
        # poly_shape = poly_shape.buffer(distance=config_pp['negative_buffer'], resolution=4)
        if config_pp['simplify']:
            poly_shape = poly_shape.simplify(
                tolerance=config_pp['simplify'], preserve_topology=True)  # False makes fast, but can cause invalid polygons
        t7 = time.perf_counter()
        if poly_shape.geom_type == 'MultiPolygon':
            all_poly_shapes = list(poly_shape.geoms)
            # poly_shape = sorted(
            #     all_poly_shapes, key=lambda geom: geom.area, reverse=True)[0]
        else:
            all_poly_shapes = [poly_shape]

        logging.debug("Rotation: {:.2f}; Polygon Creation: {:.2f}; Simplify 1: {:.2f}; Positive Buffer: {:.2f}; Negative Buffer: {:.2f}; Simplify 2: {:.2f}".format(
            (t1 - t0) * 1000, (t2 - t1) * 1000, (t4 - t3) * 1000, (t5 - t4) * 1000, (t6 - t5) * 1000, (t7 - t6) * 1000
        ))

        # It's possible that our polygon has now broken into a multipolygon
        # Check for this situation and handle it
        # all_poly_shapes = [poly_shape]
        # print(len(all_poly_shapes))
        # iterate through every polygons and check for plane extraction
        for poly_shape in all_poly_shapes:
            area = poly_shape.area
            # print(poly_shape.geom_type, area)
            # logging.info("Plane is big enough still")
            if post_filter['plane_area']['min'] <= 0 or area >= post_filter['plane_area']['min']:
                dim = np.asarray(poly_shape.exterior).shape[1]
                # logging.info("Plane is big enough still")
                if config_pp['negative_buffer'] or config_pp['simplify'] or config_pp['positive_buffer'] and dim < 3:
                    # convert back to 3D coordinates
                    # create kd tree for vertex lookup after buffering operations
                    t8 = time.perf_counter()
                    kd_tree = create_kd_tree(shell_coords, hole_coords)
                    t9 = time.perf_counter()
                    poly_shape = recover_3d(poly_shape, kd_tree, z_value)
                    t10 = time.perf_counter()
                    logging.debug("Create KD Tree: {:.2f}; Recover Polygon 3D Coordinates: {:.2f}".format(
                        (t9 - t8) * 1000, (t10 - t9) * 1000
                    ))

                # Capture the polygon as well as its z height
                # after applying buffering and simplification with shapely/geos all polygons are valid
                # print(poly_shape.is_valid)
                new_plane_polygon = Polygon(shell=poly_shape.exterior)
                planes.append((new_plane_polygon, z_value))
                planes_indices.append(poly_index)

                for hole_lr in poly_shape.interiors:
                    # Filter by number of obstacle vertices, removes noisy holes
                    if len(hole_lr.coords) > post_filter['hole_vertices']['min']:
                        hole_poly = Polygon(shell=hole_lr)
                        area = hole_poly.area
                        # filter by area
                        if post_filter['hole_area']['min'] <= 0.0 or area >= post_filter['hole_area']['min'] and area < post_filter['hole_area']['max']:
                            z_value = hole_lr.coords[0][2]
                            obstacles.append((hole_poly, z_value))
    if rm is not None:
        t11 = time.perf_counter()
        rm_inv = rm.inv()
        for i, (poly, z_value) in enumerate(planes):
            points = np.asarray(poly.exterior)
            new_poly = Polygon(rm_inv.apply(points))
            planes[i] = (new_poly, z_value)

        for i, (poly, z_value) in enumerate(obstacles):
            points = np.asarray(poly.exterior)
            new_poly = Polygon(rm_inv.apply(points))
            obstacles[i] = (new_poly, z_value)
        t12 = time.perf_counter()
        logging.debug("Revert Rotation and Create New Polygons: {:2f}".format((t12 - t11) * 1000))
    return planes, obstacles, planes_indices
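
A toy sketch of the buffer/simplify pipeline described by the config_pp example above, applied to a square with a small hole; the parameter values here are illustrative only:

from shapely.geometry import Polygon, JOIN_STYLE

shell = [(0, 0), (1, 0), (1, 1), (0, 1)]
hole = [(0.45, 0.45), (0.55, 0.45), (0.55, 0.55), (0.45, 0.55)]
poly = Polygon(shell, [hole])

poly = poly.simplify(0.02, preserve_topology=True)                        # 'simplify'
poly = poly.buffer(0.005, join_style=JOIN_STYLE.mitre, resolution=4)      # 'positive_buffer' fills tiny holes
poly = poly.buffer(-0.03, join_style=JOIN_STYLE.mitre, resolution=4)      # 'negative_buffer' expands remaining holes
poly = poly.simplify(0.02, preserve_topology=True)

print(poly.geom_type, round(poly.area, 4))
if poly.geom_type == 'Polygon':
    print(len(poly.interiors), "hole(s) remain")
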
Пример #50
0
                patch_humanmarkup_intersect_polygon = Polygon([(0, 0), (1, 1),
                                                               (1, 0)])
                patch_min_x_pixel = i * patch_size
                patch_min_y_pixel = j * patch_size
                patch_width_unit = float(patch_size) / float(image_width)
                patch_height_unit = float(patch_size) / float(image_height)
                x10 = float(i * float(patch_size)) / float(image_width)
                y10 = float(j * float(patch_size)) / float(image_height)
                x20 = float((i + 1) * float(patch_size)) / float(image_width)
                y20 = float((j + 1) * float(patch_size)) / float(image_height)
                #print i,j,patch_size,image_width,image_height,x10,y10,x20,y20;
                patch_polygon1 = [[x10, y10], [x20, y10], [x20, y20],
                                  [x10, y20], [x10, y10]]
                tmp_poly = [tuple(i1) for i1 in patch_polygon1]
                tmp_polygon = Polygon(tmp_poly)
                patch_polygon = tmp_polygon.buffer(0)
                patch_polygon_bound = patch_polygon.bounds
                patch_polygon_area = patch_polygon.area

                for humanMarkup in humanMarkupList:
                    if (patch_polygon.within(humanMarkup)):
                        #print "-- within --" ;
                        patchHumanMarkupRelation = "within"
                        break
                    elif (patch_polygon.intersects(humanMarkup)):
                        #print "-- intersects --";
                        patchHumanMarkupRelation = "intersect"
                        patch_humanmarkup_intersect_polygon = humanMarkup
                        break
                    else:
                        #print "-- disjoin --";
    def compute_environment(self):
        drones = self.__drones
        points = self.__points

        # 1.- Get polygon giving a list of points
        from shapely.geometry import Polygon
        polygon = Polygon(points)
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        plt.plot(*polygon.exterior.xy)  # Only for Python 3
        plt.savefig(Config.BASE_ROUTE + 'field_polygon.png')
        plt.clf()

        # 2.- Get minimum bounding rectangle
        # 2.1.- We need coordinates closest to south (min_x), north (max_x), west (min_y) and east (max_y)
        min_x = min(points, key=lambda t: t[0])[0]
        max_x = max(points, key=lambda t: t[0])[0]
        min_y = min(points, key=lambda t: t[1])[1]
        max_y = max(points, key=lambda t: t[1])[1]

        # 2.2.- Get number of squares vertically (num_v) and horizontally (num_h) given the drones' minimum image rectangle
        import math
        num_v = Config.ENVIRONMENT_ROWS
        num_h = Config.ENVIRONMENT_COLUMNS

        # 3.3.- Create a numpy matrix with a cell for each image square
        import numpy as np
        environment = np.zeros((num_h, num_v))

        # 3.4.- Get coordinates deltas for computing points
        d_v = (max_y - min_y) / num_v
        d_h = (max_x - min_x) / num_h

        # 3.4 Get original operator's point
        from shapely.ops import nearest_points
        closest_point = nearest_points(polygon.exterior, self.__operator_position)[0]

        # 3.5.- Check visitable squares as 1
        import itertools
        for (i, j) in itertools.product(list(range(num_v)), list(range(num_h))):  # i: [0, num_v-1], j: [0, num_h-1]
            sp1 = (j * d_h + min_x, (num_v - i) * d_v + min_y)
            sp2 = ((j + 1) * d_h + min_x, (num_v - i) * d_v + min_y)
            sp3 = (j * d_h + min_x, (num_v - (i + 1)) * d_v + min_y)
            sp4 = ((j + 1) * d_h + min_x, (num_v - (i + 1)) * d_v + min_y)
            square = Polygon([sp1, sp2, sp4, sp3])

            if Config.SQUARE:
                environment[num_h - (j + 1), num_v - (i + 1)] = 1.0  # Marked as navigable square

            if polygon.intersects(square.buffer(1e-9)) or polygon.contains(square.buffer(1e-9)):
               
                if not Config.SQUARE:
                    environment[num_h - (j + 1), num_v - (i + 1)] = 1.0  # Marked as navigable square

                if Config.START_CORNER_0_0 and Config.SQUARE:
                    self.__drone_initial_position = (0, 0)
                elif closest_point.within(square) or closest_point.intersects(square):
                    self.__drone_initial_position = (
                        num_h - (j + 1), num_v - (i + 1))  # Set operator's position as initial position

        self.__original_environment = environment

        import numpy as np
        np.savetxt(Config.BASE_ROUTE + Config.MAP_ROUTE, environment)

        import matplotlib
        matplotlib.use('Agg')  # For running in SO without graphical environment
        import matplotlib.pyplot as plt
        from matplotlib.ticker import MaxNLocator
        ax = plt.figure().gca()
        ax.invert_yaxis()
        ax.xaxis.set_major_locator(MaxNLocator(integer=True))
        ax.yaxis.set_major_locator(MaxNLocator(integer=True))
        computed_environment = environment.copy()
        computed_environment[self.__drone_initial_position] = 3
        ax.pcolor(computed_environment, cmap='Greys', edgecolors='gray')
        plt.savefig(Config.BASE_ROUTE + 'computed_environment.png')
        plt.clf()

        return environment
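
A standalone sketch of the grid-marking idea in compute_environment above: cells of the bounding-box grid are flagged navigable when they intersect the field polygon (the tiny buffer absorbs floating point error, as with square.buffer(1e-9) above):

import numpy as np
from shapely.geometry import Polygon

field = Polygon([(0, 0), (9, 1), (10, 8), (2, 9)])
min_x, min_y, max_x, max_y = field.bounds
rows, cols = 5, 5
dx, dy = (max_x - min_x) / cols, (max_y - min_y) / rows

grid = np.zeros((rows, cols))
for i in range(rows):
    for j in range(cols):
        cell = Polygon([(min_x + j * dx,       min_y + i * dy),
                        (min_x + (j + 1) * dx, min_y + i * dy),
                        (min_x + (j + 1) * dx, min_y + (i + 1) * dy),
                        (min_x + j * dx,       min_y + (i + 1) * dy)])
        if field.intersects(cell.buffer(1e-9)):
            grid[i, j] = 1.0

print(grid)
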
Пример #52
0
                        DROP TABLE IF EXISTS {metadata_table};
                        CREATE TABLE {metadata_table} (dataset text PRIMARY KEY, source text, 
                        frequency interval , start timestamp);
                        INSERT INTO {metadata_table} (dataset, frequency, start) VALUES 
                            ('rain', '1 day', '2000-01-01');

                        DROP TABLE IF EXISTS {dem_table};
                        CREATE TABLE {dem_table} (rid serial PRIMARY KEY, rast raster);
                        INSERT INTO {dem_table}(rast) VALUES (ST_FromGDALRaster(%(rast)s));
                        
                        DROP TABLE IF EXISTS {buildings_table};
                        CREATE TABLE {buildings_table} (gid serial PRIMARY KEY, geom geometry);
                        INSERT INTO {buildings_table} (gid, geom) VALUES (500, ST_GeomFromText(%(buildings)s, 27700));
                        
                        DROP TABLE IF EXISTS {green_areas_table};
                        CREATE TABLE {green_areas_table} (gid serial PRIMARY KEY, geom geometry);
                        INSERT INTO {green_areas_table} (gid, geom) VALUES (500, ST_GeomFromText(%(green_areas)s, 27700));
                    """).format(
                domain_table=sql.Identifier(r.domain_table),
                dem_table=sql.Identifier(r.dem_table),
                rain_table=sql.Identifier(r.rain_table),
                metadata_table=sql.Identifier(r.metadata_table),
                buildings_table=sql.Identifier(r.buildings_table),
                green_areas_table=sql.Identifier(r.green_areas_table)
            ),
            dict(
                geom=str(geom),
                buildings=str(geom.buffer(-70)),
                green_areas=str(geom.buffer(-100)),
                rast=psycopg2.Binary(dem_file.read())))
Пример #53
0
        polygon_algorithm[index][0] = algorithm
        polygon_algorithm[index][1] = polygon
        index += 1
    print('total annotation number is %d' % index)
    total_annotation_count = index
    print('this polygon point count is %d' % len(polygon_algorithm[0][1]))

    print('-- find all annotations NOT within another annotation  -- ')
    polygon_algorithm_final = [[0 for y in range(3)] for x in range(200)]
    index3 = 0
    for index1 in range(0, total_annotation_count):
        algorithm = polygon_algorithm[index1][0]
        annotation = polygon_algorithm[index1][1]
        tmp_poly = [tuple(i) for i in annotation]
        annotation_polygon1 = Polygon(tmp_poly)
        annotation_polygon_1 = annotation_polygon1.buffer(0)
        polygonBound = annotation_polygon_1.bounds
        array_size = len(annotation)
        print('-----------------------------------------------------------------')
        print("annotation index %d and annotation point size %d" % (index1,
                                                                    array_size))
        is_within = False
        for index2 in range(0, total_annotation_count):
            annotation2 = polygon_algorithm[index2][1]
            tmp_poly2 = [tuple(j) for j in annotation2]
            annotation_polygon2 = Polygon(tmp_poly2)
            annotation_polygon_2 = annotation_polygon2.buffer(0)
            if index1 != index2 and not annotation_polygon_1.equals(
                    annotation_polygon_2):
                if (annotation_polygon_1.within(annotation_polygon_2)):
                    is_within = True
Пример #54
0
 if ii % 100 == 0:
     sys.stderr.write('{}\n'.format(ii))
     sys.stderr.flush()
 rec = shaperec.record
 shp = shaperec.shape
 if opts.use_index:
     object_id = ii + 1
 else:
     object_id = rec.OBJECTID
 path_original = Path(shp.points)
 if opts.buffer is not None:
     poly_buffer = Polygon(shp.points).buffer(opts.buffer)
 else:
     poly_buffer = Polygon(shp.points)
 path_search = Path(
     np.array(poly_buffer.buffer(
         opts.radius).exterior.coords.xy).swapaxes(0, 1))
 flags = path_search.contains_points(np.hstack(
     (xp.reshape(-1, 1), yp.reshape(-1, 1))),
                                     radius=0.0).reshape(data_shape)
 if opts.debug or opts.check:
     flags_inside = []
     flags_near = []
     path_pixels = []
 inds = []
 rats = []
 err = False
 for ix, iy in zip(indx[flags], indy[flags]):
     xc = xp[iy, ix]
     yc = yp[iy, ix]
     pc = Point(xc, yc)
     poly_pixel = Polygon([
Пример #55
0
class HighZoomCity(City):
    
    def __init__(self, name, rank, zoom, population, geonameid, location, position, font):
        self.name = name
        self.rank = rank
        self.zoom = zoom
        self.population = population
        self.geonameid = geonameid
        self.location = location
        self.position = position

        self.buffer = 2
        
        self._original = deepcopy(position)
        self._label_shape = None
        
        self._width, self._height = font.getsize(self.name)

        self._update_label_shape()

    def __repr__(self):
        return '<H.Z. City: %s>' % self.name
    
    def __hash__(self):
        return id(self)

    def _update_label_shape(self):
        """
        """
        x, y = self.position.x, self.position.y
        
        x1, y1 = x - self._width/2, y - self._height/2
        x2, y2 = x + self._width/2, y + self._height/2
        
        self._label_shape = Polygon(((x1, y1), (x1, y2), (x2, y2), (x2, y1), (x1, y1)))
    
    def mask_shape(self):
        return self._label_shape.buffer(self.buffer).envelope
    
    def move(self):
        x = (random() - .5) * self._width
        y = (random() - .5) * self._height
    
        self.position.x = self._original.x + x
        self.position.y = self._original.y + y
        
        self._update_label_shape()
    
    def placement_energy(self):
        x = 2 * (self.position.x - self._original.x) / self._width
        y = 2 * (self.position.y - self._original.y) / self._width
        
        return hypot(x, y) ** 2
    
    def overlap_energy(self, other):
        if self.overlaps(other):
            return min(10.0 / self.rank, 10.0 / other.rank)

        return 0.0
    
    def in_range(self, other, reflexive=True):
        range = hypot(self._width + self.buffer*2, self._height + self.buffer*2)
        distance = hypot(self.position.x - other.position.x, self.position.y - other.position.y)
        in_range = distance <= range
        
        if reflexive:
            in_range |= other.in_range(self, False)

        return in_range
Пример #56
0
    def random_obstacles(self,
                         n,
                         form,
                         params=None,
                         heights=None,
                         properties=None,
                         etching=0,
                         on_area=None):
        '''
        Place random obstacles inside the shape.

        .. versionadded:: 0.4

        Parameters
        ----------
        n : int or float
            Number of obstacles if `n` is an :obj:`int`, otherwise represents
            the fraction of the shape's bounding box that should be occupied by
             the obstacles' bounding boxes.
        form : str or Shape
            Form of the obstacles, among "disk", "ellipse", "rectangle", or a
            custom shape.
        params : dict, optional (default: None)
            Dictionary containing the instructions to build a predefined form
            ("disk", "ellipse", "rectangle"). See their creation methods for
            details. Leave `None` when using a custom shape.
        heights : float or list, optional (default: None)
            Heights of the obstacles. If None, the obstacle will be considered as
            a "hole" in the structure, i.e. an uncrossable obstacle.
        properties : dict or list, optional (default: None)
            Properties of the obstacles if they constitute areas (only used if
            `heights` is not None). If not provided and `heights` is not None,
            will default to the "default_area" properties.
        etching : float, optional (default: 0)
            Etching of the obstacles' corners (rounded corners).
        '''
        form_center = None

        if heights is not None:
            if _unit_support:
                from .units import Q_
                if isinstance(heights, Q_):
                    heights = heights.m_as(self.unit)
                elif indexable(heights):
                    if isinstance(heights[0], Q_):
                        heights = [h.m_as(self.unit) for h in heights]

        # check n
        if not isinstance(n, np.integer):
            assert n <= 1, "Filling fraction (floating point `n`) must be "  +\
                           "smaller or equal to 1."

        # check form
        if form == "disk":
            form = self.disk(**params)
        elif form == "ellipse":
            form = self.ellipse(**params)
        elif form == "rectangle":
            form = self.rectangle(**params)
        elif not isinstance(form, (Polygon, MultiPolygon, Shape, Area)):
            raise RuntimeError("Invalid form: '{}'.".format(form))

        # get form center and center on (0, 0)
        xmin, ymin, xmax, ymax = form.bounds
        form_center = (0.5 * (xmax + xmin), 0.5 * (ymax + ymin))
        form_width = xmax - xmin
        form_height = ymax - ymin
        form_bbox_area = float((xmax - xmin) * (ymax - ymin))

        # get shape width and height
        xmin, ymin, xmax, ymax = self.bounds
        width = xmax - xmin
        height = ymax - ymin

        if not np.allclose(form_center, (0, 0)):
            form = translate(form, -form_center[0], -form_center[1])

        # create points where obstacles can be located
        locations = []
        on_width = int(np.rint(width / form_width))
        on_height = int(np.rint(height / form_height))
        x_offset = 0.5 * (width - on_width * form_width)
        y_offset = 0.5 * (height - on_height * form_height)

        for i in range(on_width):
            for j in range(on_height):
                x = xmin + x_offset + i * form_width
                y = ymin + y_offset + j * form_height
                locations.append((x, y))

        # get elected locations
        if not isinstance(n, np.integer):
            n = int(np.rint(len(locations) * n))

        indices = list(range(len(locations)))
        indices = np.random.choice(indices, n, replace=False)
        locations = [locations[i] for i in indices]

        # check heights
        same_prop = []
        if heights is not None:
            try:
                if len(heights) != n:
                    raise RuntimeError("One `height` entry per obstacle is "
                                       "required; expected "
                                       "{} but got {}".format(n, len(heights)))
                same_prop.append(np.allclose(heights, heights[0]))
            except TypeError:
                same_prop.append(True)
                heights = [heights for _ in range(n)]

        # check properties
        if isinstance(properties, dict):
            properties = (properties for _ in range(n))
            same_prop.append(True)
        elif properties is not None:
            assert len(properties) == n, \
                "One `properties` entry per obstacle is  required; " +\
                "expected {} but got {}".format(n, len(properties))
            same_prop.append(True)
            for dic in properties:
                same_prop[-1] *= (dic == properties[0])
        else:
            same_prop.append(True)
            properties = (self.areas["default_area"].properties.copy()
                          for _ in range(n))

        # make names
        num_obstacles = 0
        for name in self.areas:
            if name.find("obstacle_") == 0:
                num_obstacles += 1

        names = ["obstacle_{}".format(num_obstacles + i) for i in range(n)]

        # create the obstacles
        if heights is None:
            new_form = Polygon()
            for loc in locations:
                new_form = new_form.union(translate(form, loc[0], loc[1]))
            if etching > 0:
                new_form = new_form.buffer(-etching, cap_style=3)
                new_form = new_form.buffer(etching)
            self.add_hole(new_form)
        else:
            if np.all(same_prop):
                # potentially contiguous areas
                new_form = Polygon()
                h = next(iter(heights))
                prop = next(iter(properties))
                for loc in locations:
                    new_form = new_form.union(translate(form, loc[0], loc[1]))
                if etching > 0:
                    new_form = new_form.buffer(-etching, cap_style=3)
                    new_form = new_form.buffer(etching)
                if self.overlaps(new_form) or self.contains(new_form):
                    self.add_area(new_form,
                                  height=h,
                                  name="obstacle",
                                  properties=prop,
                                  override=True)
            else:
                # many separate areas
                prop = (locations, heights, names, properties)
                for loc, h, name, p in zip(*prop):
                    new_form = translate(form, loc[0], loc[1])
                    if etching > 0:
                        new_form = new_form.buffer(-etching, cap_style=3)
                        new_form = new_form.buffer(etching)
                    if h is None:
                        self.add_hole(new_form)
                    elif self.overlaps(new_form) or self.contains(new_form):
                        self.add_area(new_form,
                                      height=h,
                                      name=name,
                                      properties=p,
                                      override=True)
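
A small sketch of the "etching" step used above: a negative buffer followed by a positive buffer (a morphological opening) rounds corners and removes features thinner than twice the etching distance; the values here are illustrative:

from shapely.geometry import Polygon

square = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
etching = 1.0

etched = square.buffer(-etching, cap_style=3).buffer(etching)
print(round(square.area, 2), round(etched.area, 2))            # corners are now rounded
print(len(square.exterior.coords), len(etched.exterior.coords))
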
Пример #57
0
        family[parent]["children"].append(woeid)

print >>sys.stderr, "Merging %s stems" % len(family.keys())
for parent in family.keys():
    family[parent]['geom'] = cascaded_union([nbhds[child] for child in family[parent]['children']])


print >>sys.stderr, "Buffering stems."
for parent, feature in family.items():
    polygon = feature['geom']
    #print >>sys.stderr, "\r%s has shape of type %s" %(place_id, type(polygon))
    if type(polygon) is Polygon:
        polygon = Polygon(polygon.exterior.coords)
    else:
        polygon = MultiPolygon([Polygon(p.exterior.coords)for p in polygon.geoms])
    family[parent]['geom'] = polygon.buffer(0)
 
print >>sys.stderr, "Writing output."
features = []
for place_id, feature in family.items():
    features.append({
        "type": "Feature",
        "id": place_id,
        "geometry": feature['geom'].__geo_interface__,
        "properties": {"woe_id": place_id, "name": feature['name']}
    })

collection = {
    "type": "FeatureCollection",
    "features": features
}
Пример #58
0
class BaseTestCases(unittest.TestCase):
    def setUp(self):
        self.spatial_mode = 'geometry'
        self._setUp()
        self.manifest = Manifest()
        self.name = self.__class__.__name__.replace('TestCases', '')
        self.manifest.update({self.name: self.datasource(self.manifest)})
        self.spatial_geom = Polygon(self.spatial['coordinates'][0])

    def check_properties(self, asset, properties):
        for item in properties:
            equality = next(iter(properties[item]))
            comparison_operator = getattr(operator, equality)
            if not comparison_operator(asset[item],
                                       properties[item][equality]):
                return False
        return True

    def _setUp(self):
        raise NotImplementedError

    def test_pattern(self):
        # Testing that datasource implements proper pattern
        for source in self.manifest.sources:
            self.assertTrue(hasattr(source, 'execute'))
            self.assertTrue(hasattr(source, 'search'))
            self.assertTrue(hasattr(source, 'tags'))
            self.assertTrue(hasattr(source, 'stac_compliant'))

    def test_spatial_search(self):
        self.manifest.flush()
        self.manifest[self.name].search(self.spatial)
        response = self.manifest.execute()

        # Buffer the input geometry to account for small discrepancies in S2 (especially with large-area searches).
        # The buffer distance is 3% of the polygon's average side length (perimeter / 4); the test passes if every returned geometry intersects the buffered input.
        buffered_geom = self.spatial_geom.buffer(0.03 *
                                                 self.spatial_geom.length / 4)

        # Confirming that each output feature intersects input
        for feat in response[self.name]['features']:
            if self.spatial_mode == 'geometry':
                asset_geom = Polygon(feat['geometry']['coordinates'][0])
            elif self.spatial_mode == 'extent':
                asset_geom = Polygon([[feat['bbox'][0], feat['bbox'][3]],
                                      [feat['bbox'][2], feat['bbox'][3]],
                                      [feat['bbox'][2], feat['bbox'][1]],
                                      [feat['bbox'][0], feat['bbox'][1]],
                                      [feat['bbox'][0], feat['bbox'][3]]])

            self.assertTrue(asset_geom.intersects(buffered_geom))

    def test_temporal_search(self):
        self.manifest.flush()
        self.manifest[self.name].search(self.spatial, self.temporal)

        response = self.manifest.execute()
        query = STACQuery(self.spatial, self.temporal)

        # Confirming that each output feature is within temporal window
        for feat in response[self.name]['features']:
            if len(feat['properties']['datetime']) == 10:
                year, month, day = feat['properties']['datetime'].split('-')
            else:
                year, month, day = feat['properties']['datetime'].split(
                    'T')[0].split('-')

            date_time = datetime.strptime(f"{year}-{month}-{day}", "%Y-%m-%d")

            self.assertTrue(query.check_temporal(date_time))

    def test_properties_search(self):
        self.manifest.flush()
        self.manifest[self.name].search(self.spatial,
                                        properties=self.properties)
        response = self.manifest.execute()

        # Confirming that output features are filtered properly
        for feat in response[self.name]['features']:
            self.assertTrue(
                self.check_properties(feat['properties'], self.properties))

    def test_limit(self):
        # Confirming that the limit kwarg works
        self.manifest.flush()
        self.manifest[self.name].search(self.spatial, limit=self.limit)
        response = self.manifest.execute()
        self.assertLessEqual(len(response[self.name]['features']), self.limit)

    def test_stac_compliant(self):
        self.manifest.flush()
        self.manifest[self.name].search(self.spatial)
        response = self.manifest.execute()

        # Confirming that output features are STAC-compliant
        for feat in response[self.name]['features']:

            fd, path = tempfile.mkstemp()
            try:
                with os.fdopen(fd, 'w') as tmp:
                    json.dump(feat, tmp)

                stac = stac_validator.StacValidate(path)
                stac.run()
                try:
                    self.assertEqual(stac.status['items']['valid'], 1)
                except:
                    # TODO: figure out why this error happens
                    if 'Unresolvable JSON pointer' in stac.message[0][
                            'error_message']:
                        pass
                    else:
                        raise

            finally:
                os.remove(path)
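
test_spatial_search in Example #58 relaxes the spatial comparison by buffering the query polygon before the intersects() check. A minimal standalone sketch of that tolerance, with shapes invented for illustration:

from shapely.geometry import Polygon

query = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
tolerance = 0.03 * query.length / 4          # 3% of the average side length (length is the perimeter)
buffered_query = query.buffer(tolerance)

# A footprint lying just outside the query only matches once the tolerance is applied.
footprint = Polygon([(1.01, 0), (2, 0), (2, 1), (1.01, 1)])
print(footprint.intersects(query))           # False
print(footprint.intersects(buffered_query))  # True
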
Example #59
0
def process_FEWSNET_IPC_data(shpfile: str, title: str):
    admin_boundaries_shapefile = "data/raw/FEWS/FEWSNET_World_Admin/FEWSNET_Admin2"
    sf_admin = shapefile.Reader(admin_boundaries_shapefile)
    colors = {
        0: "white",
        1: "#c3e2c3",
        2: "#f3e838",
        3: "#eb7d24",
        4: "#cd2026",
        5: "#5d060c",
        66: "aqua",
        88: "white",
        99: "white",
    }
    sf = shapefile.Reader(shpfile)

    fig, ax = plt.subplots(figsize=(12, 12))
    ax.set_aspect("equal")
    ax.set_title(title)
    plt.style.use("ggplot")

    def fill_and_plot(points, color_code):
        xs, ys = lzip(*points)
        ax.plot(xs, ys, linewidth=0.5, color="grey")
        ax.fill(xs, ys, color=colors[color_code])

    fs_polygons = []

    for i, sr in tqdm(enumerate(sf.shapeRecords())):
        nparts = len(sr.shape.parts)
        parts, points = sr.shape.parts, sr.shape.points
        CS = int(sr.record[0])
        if nparts == 1:
            # fill_and_plot(points, CS)
            fs_polygons.append((Polygon(points), int(sr.record[0])))
        else:
            for ip, part in enumerate(parts):
                if ip < nparts - 1:
                    i1 = parts[ip + 1] - 1
                else:
                    i1 = len(points)
                # fill_and_plot(points[part : i1 + 1], CS),
                fs_polygons.append(
                    (Polygon(points[part:i1 + 1]), int(sr.record[0])))

    south_sudan_srs = [
        sr for sr in sf_admin.shapeRecords() if sr.record[3] == "South Sudan"
    ]

    lines = []

    for sr in tqdm(south_sudan_srs, desc="South Sudan Counties"):
        county_polygon = Polygon(sr.shape.points)
        for fs_polygon in tqdm(fs_polygons, desc="fs_polygons"):
            if county_polygon.buffer(-0.05).intersects(fs_polygon[0]):
                centroid = county_polygon.centroid
                ax.text(
                    centroid.x,
                    centroid.y,
                    sr.record[8],
                    fontsize=6,
                    horizontalalignment="center",
                )
                xs, ys = lzip(*sr.shape.points)
                CS = int(fs_polygon[1])
                fill_and_plot(sr.shape.points, CS)
                lines.append("\t".join([str(x)
                                        for x in sr.record] + [str(CS)]))

    with open("ipc_data.tsv", "w") as f:
        f.write("\n".join(lines))

    plt.savefig("shape.pdf")
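
The county loop above erodes each county with buffer(-0.05) before testing intersection with the food-security polygons, so that polygons which merely touch a county border are not counted. A minimal standalone sketch of that erosion, with boxes invented for illustration:

from shapely.geometry import box

county = box(0, 0, 1, 1)
neighbour = box(1, 0, 2, 1)   # shares only the x = 1 edge with the county

print(county.intersects(neighbour))                 # True: touching along a border counts as intersecting
print(county.buffer(-0.05).intersects(neighbour))   # False: erosion removes the boundary-only contact
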
def get_tumor_intersect_flag(xmin, ymin, width, height, tile_offset_x,
                             tile_offset_y):
    x1 = float(tile_offset_x + xmin) / float(image_width)
    y1 = float(tile_offset_y + ymin) / float(image_height)
    x2 = float(tile_offset_x + xmin + width - 1) / float(image_width)
    y2 = float(tile_offset_y + ymin + height - 1) / float(image_height)
    patch_polygon_0 = [[x1, y1], [x2, y1], [x2, y2], [x1, y2], [x1, y1]]
    tmp_poly = [tuple(i1) for i1 in patch_polygon_0]
    tmp_polygon = Polygon(tmp_poly)
    patch_obj = tmp_polygon.buffer(0)

    is_tumor_patch = False
    is_non_tumor_patch = False
    patch_humanmarkup_intersect_polygon_tumor = Polygon([(0, 0), (1, 1),
                                                         (1, 0)])
    patch_humanmarkup_intersect_polygon_nontumor = Polygon([(0, 0), (1, 1),
                                                            (1, 0)])

    for humanMarkup in humanMarkupList_tumor:
        if (patch_obj.within(humanMarkup)):
            is_tumor_patch = True
            patch_humanmarkup_intersect_polygon_tumor = humanMarkup
            break
        elif (patch_obj.intersects(humanMarkup)):
            is_tumor_patch = True
            patch_humanmarkup_intersect_polygon_tumor = humanMarkup
            break

    for humanMarkup2 in humanMarkupList_non_tumor:
        if (patch_obj.within(humanMarkup2)):
            is_non_tumor_patch = True
            patch_humanmarkup_intersect_polygon_nontumor = humanMarkup2
            break
        elif (patch_obj.intersects(humanMarkup2)):
            is_non_tumor_patch = True
            patch_humanmarkup_intersect_polygon_nontumor = humanMarkup2
            break

    # not related to tumor or non tumor region
    if not is_tumor_patch and not is_non_tumor_patch:
        return 2

    # related to tumor but not non tumor region
    if is_tumor_patch and not is_non_tumor_patch:
        return 1

    # not related to tumor but related to non tumor region
    if not is_tumor_patch and is_non_tumor_patch:
        return 0

    if is_tumor_patch and is_non_tumor_patch:  # patch intersect with both tumor and non tumor region
        if patch_humanmarkup_intersect_polygon_tumor.within(
                patch_humanmarkup_intersect_polygon_nontumor
        ):  #tumor is within another non tumor region
            return 1
        elif patch_humanmarkup_intersect_polygon_nontumor.within(
                patch_humanmarkup_intersect_polygon_tumor
        ):  #non_tumor is within another tumor region
            return 0
        else:  #tumor and non tumor region intersects each other
            return 1
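
The classification above rests on Shapely's within() and intersects() predicates: within() requires full containment, while intersects() is true for any overlap or shared boundary. A minimal standalone sketch of the distinction, with boxes invented for illustration:

from shapely.geometry import box

patch = box(0.2, 0.2, 0.4, 0.4)
markup = box(0.0, 0.0, 1.0, 1.0)
partial = box(0.3, 0.3, 1.5, 1.5)

print(patch.within(markup))       # True: the patch lies entirely inside the markup region
print(patch.within(partial))      # False: the patch is only partly inside this region...
print(patch.intersects(partial))  # ...but intersects() still reports the overlap
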