Example no. 1
def iou_bbox_with_yaw(vol_a, box_a, vol_b, box_b):
    """
    A simplified calculation of 3D bounding box intersection
    over union. It is assumed that the bounding box is only rotated
    around the Z axis (yaw) from an axis-aligned box.

    :param vol_a, vol_b: precalculated obstacle volumes for comparison
    :param box_a, box_b: obstacle bounding boxes for comparison
    :return: iou float, intersection volume float
    """
    # height (Z) overlap
    min_h_a = np.min(box_a[2])
    max_h_a = np.max(box_a[2])
    min_h_b = np.min(box_b[2])
    max_h_b = np.max(box_b[2])
    max_of_min = np.max([min_h_a, min_h_b])
    min_of_max = np.min([max_h_a, max_h_b])
    z_intersection = np.max([0, min_of_max - max_of_min])
    if z_intersection == 0:
        return 0., 0.

    # oriented XY overlap
    xy_poly_a = Polygon(list(zip(*box_a[0:2, 0:4])))
    xy_poly_b = Polygon(list(zip(*box_b[0:2, 0:4])))
    xy_intersection = xy_poly_a.intersection(xy_poly_b).area
    if xy_intersection == 0:
        return 0., 0.

    intersection = z_intersection * xy_intersection
    union = vol_a + vol_b - intersection
    iou = intersection / union
    return iou, intersection
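A minimal usage sketch for the function above. The box layout (a 3x8 numpy array of corner coordinates, rows x/y/z, with the first four columns forming one face) and the make_box helper are assumptions of this sketch, not part of the original snippet:

import numpy as np
from shapely.geometry import Polygon

def make_box(cx, cy, cz, w=1.0, l=1.0, h=1.0):
    # hypothetical helper: axis-aligned box as a 3x8 corner array (rows x, y, z)
    xs = [cx - w/2, cx + w/2, cx + w/2, cx - w/2] * 2
    ys = [cy - l/2, cy - l/2, cy + l/2, cy + l/2] * 2
    zs = [cz - h/2] * 4 + [cz + h/2] * 4
    return np.array([xs, ys, zs])

# two unit cubes offset by 0.5 along X: intersection 0.5, union 1.5
box_a = make_box(0.0, 0.0, 0.0)
box_b = make_box(0.5, 0.0, 0.0)
iou, inter = iou_bbox_with_yaw(1.0, box_a, 1.0, box_b)
print(round(iou, 4))  # 0.3333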
Example no. 2
def main():
	results = open('results.txt','w')
	parser = argparse.ArgumentParser(description='Runs text detector on relevant images at window level')
	parser.add_argument('classifier_file', help='Path to classifier CLF')
	parser.add_argument('-l', '--limit', type=int, metavar='COUNT', required=False, help='Maximum number of images to use')
	parser.add_argument('-r', '--random', action="store_true", default=False, required=False, help='Fetch images ordered randomly if limit is active')
	args = parser.parse_args()
	parameters["classifier_file"] = args.classifier_file
	parameters["evaluate_windows"] = True
	i = rigor.runner.Runner('text', parameters, limit=args.limit, random=args.random)
	results.write("threshold\timageid\texpected\tdetected\n")
	for cascade_threshold in (.5,):
		parameters['cascade_threshold'] = cascade_threshold
		for result in i.run():
			image_id = result[0]
			detected = result[1]
			undetected = result[2]
			expected_blob = Polygon()
			for expected in result[3]:
				expected_blob = expected_blob.union(Polygon(expected))
			for dbox in detected:
				box = Polygon(dbox)
				intersection = expected_blob.intersection(box)
				hit = int(bool(intersection) and (intersection.area / box.area) > parameters["minimum_percent_overlap"])
				results.write("{}\t{}\t1\t{}\n".format(cascade_threshold, image_id, hit))
			for dbox in undetected:
				box = Polygon(dbox)
				intersection = expected_blob.intersection(box)
				hit = int(bool(intersection) and (intersection.area / box.area) > parameters["minimum_percent_overlap"])
				results.write("{}\t{}\t0\t{}\n".format(cascade_threshold, image_id, hit))
Example no. 3
def findIntersect(inside_set):
    if len(inside_set) == 0:
        return [(0, 0), (x_max, 0), (x_max, y_max), (0, y_max), (0, 0)]
    elif len(inside_set) == 1:
        p1 = Polygon(inside_set[0])
        return list(p1.exterior.coords)
    elif len(inside_set) == 2:
        p1 = Polygon(inside_set[0])
        p2 = Polygon(inside_set[1])
        return list(p1.intersection(p2).exterior.coords)
    else:
        # intersect the first two polygons, push the result back on the
        # front of the list, and recurse until two polygons remain
        p1 = Polygon(inside_set.pop(0))
        p2 = Polygon(inside_set.pop(0))
        inside_set.insert(0, list(p1.intersection(p2).exterior.coords))
        return findIntersect(inside_set)
Example no. 4
def pointinside(lat, lon, shapefile):

    # Check which points fall inside the polygon
    # http://streamhacker.com/2010/03/23/python-point-in-polygon-shapely/

    # Read the vertex points and turn them into a polygon

    poly = Polygon(shapefile)

    nlons = len(lon)
    nlats = len(lat)

    points_grid = []
    lonlat_grid = []

    array_bool = np.ones((nlats, nlons), dtype="bool")

    for xlon in range(0, nlons):

        for ylat in range(0, nlats):

            point = Point((lon[xlon], lat[ylat]))

            if poly.contains(point):
                array_bool[ylat, xlon] = False
                points_grid.append((ylat, xlon))
                lonlat_grid.append((lat[ylat], lon[xlon]))

    return points_grid, lonlat_grid, array_bool
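A quick sketch of the inverted-mask convention above on a hypothetical 2x3 grid: cells inside the polygon are set to False so the array can drive numpy masking:

lats, lons = [0.5, 1.5], [0.5, 1.5, 2.5]
square = [(0, 0), (2, 0), (2, 2), (0, 2)]
pts, ll, mask = pointinside(lats, lons, square)
print(mask)
# [[False False  True]
#  [False False  True]]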
Example no. 5
def return_sample_list(num, regions, dist, scale):
    """
    Return a list containing the position (x,y) of the samples.
    Output is a list of lists: the ith list contains samples for
    the ith region.
    """
    sample_list = []
    for index, region in enumerate(regions):
        min_x = min(p[0] for p in region)
        max_x = max(p[0] for p in region)
        min_y = min(p[1] for p in region)
        max_y = max(p[1] for p in region)
        poly = Polygon(region)
        region_samples = []
        while len(region_samples) != num:
            if dist == "normal":
                candidate = (np.random.normal(loc=(min_x + max_x) / 2, scale=scale),
                             np.random.normal(loc=(min_y + max_y) / 2, scale=scale))
            elif dist == "uniform":
                candidate = (np.random.uniform(min_x, max_x), np.random.uniform(min_y, max_y))
            else:
                raise ValueError("dist must be 'normal' or 'uniform'")
            if poly.contains(Point(candidate)):
                region_samples.append(candidate)
        sample_list.append(region_samples)
 
    print "-done sampling-"
    return sample_list    
Example no. 6
	def __create_grid(self):
		"""
		Create uniform grid over region.
		"""
		polygon_coords = self.__resample_polygon()
		
		# define polygon from polygon coords
		polygon = Polygon(polygon_coords)
		
		# get bounding box coordinates
		min_lon, min_lat, max_lon, max_lat = polygon.bounds
		
		# compute number of nodes along lat and lon
		n_lat = int(round((max_lat - min_lat) / self.grid_spacing)) + 1
		n_lon = int(round((max_lon - min_lon) / self.grid_spacing)) + 1
		
		# create grid of points inside polygon
		grid = []
		for i in range(n_lat):
			for j in range(n_lon):
				lat = min_lat + i * self.grid_spacing
				lon = min_lon + j * self.grid_spacing
				p = shapelyPoint(lon,lat)
				if polygon.contains(p) or polygon.touches(p):
					grid.append((lon,lat))
		return grid
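The same grid logic rerun standalone, as a sketch on a hypothetical 2x2-degree square with 0.5-degree spacing:

from shapely.geometry import Point, Polygon

region = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])
spacing = 0.5
min_lon, min_lat, max_lon, max_lat = region.bounds
grid = []
for i in range(int(round((max_lat - min_lat) / spacing)) + 1):
    for j in range(int(round((max_lon - min_lon) / spacing)) + 1):
        p = Point(min_lon + j * spacing, min_lat + i * spacing)
        if region.contains(p) or region.touches(p):
            grid.append((p.x, p.y))
print(len(grid))  # 25 nodes on the 5x5 lattice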
Example no. 7
 def nms_discard(self, proposal, accepted_detections, dataframe):
 
   p_idx = proposal[0]
   p_label = proposal[1].index[0]
   p_xmin = dataframe.iloc[p_idx]['xmin']
   p_xmax = dataframe.iloc[p_idx]['xmax']
   p_ymin = dataframe.iloc[p_idx]['ymin']
   p_ymax = dataframe.iloc[p_idx]['ymax']
   p_poly = Polygon([(p_xmin,p_ymin), (p_xmax,p_ymin), (p_xmax,p_ymax), (p_xmin, p_ymax)])
 
   for detection in accepted_detections.values():
     d_idx = detection[0]
     d_label = detection[1].index[0]
     if d_label != p_label:
       # No point checking if it isn't the same class of object
       continue
     else:
       d_xmin = dataframe.iloc[d_idx]['xmin']
       d_xmax = dataframe.iloc[d_idx]['xmax']
       d_ymin = dataframe.iloc[d_idx]['ymin']
       d_ymax = dataframe.iloc[d_idx]['ymax']
       d_poly = Polygon([(d_xmin,d_ymin), (d_xmax,d_ymin), (d_xmax,d_ymax), (d_xmin, d_ymax)])
  
       intersection = p_poly.intersection(d_poly)
       union = p_poly.union(d_poly)
       # IoU above 0.3 means this proposal duplicates an accepted detection
       if intersection.area / union.area > 0.3:
         return True
 
   return False
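A quick check of the overlap criterion used above, with shapely's box helper standing in for the DataFrame rows (values are illustrative only):

from shapely.geometry import box

p_poly = box(0, 0, 10, 10)
d_poly = box(5, 0, 15, 10)
iou = p_poly.intersection(d_poly).area / p_poly.union(d_poly).area
print(iou > 0.3)  # True: 50/150 = 0.33, so this proposal would be discarded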
Example no. 8
def latlon2countycode(lat,lon):
	recno = -1
	try:
		# Query the spatial index for all counties whose bounding box contains the point
		hits = list(idx.intersection((lon,lat,lon,lat)))
		if len(hits) == 1:                  # Exact match
			recno = hits[0]
		elif len(hits) > 1:                 # Multiple candidate counties
			# For example, 42.66326054 -87.80922716 -> [3180, 3193]:
			# either Kenosha or Racine county.
			for hitIdx in hits:             # Search all candidates
				county = shapes[hitIdx]
				poly   = Polygon(county.points)
				if poly.contains(Point(lon,lat)):
					recno = hitIdx
					break
		# len(hits) == 0 leaves recno at -1
	except Exception:
		# Lat/Lon don't match US county shapefiles
		recno = 0
	return recno
Example no. 9
def intersectNodes(path, srs, projName, projSRS, isGridProject, gridResolution):
    j = []
    sf = shapefile.Reader(path)
    nodes = list(getShapelyNodes(projName))
    for shape in sf.shapes():
        isGridLine = False
        shType = shape.shapeType
        # http://en.wikipedia.org/wiki/Shapefile#Shapefile_shape_format_.28.shp.29
        if shType == 5: # Polygon
            sh = Polygon(shape.points)
        elif shType == 3: # Line
            if isGridProject:
                sh = LineString(shape.points)
                isGridLine = True
            else:
                continue
        else:
            consoleAppend('Unknown shape type %s. Continue without access' % shType)
            continue

        if srs != projSRS:
            sh = shapelyReproject(sh, srs, projSRS)

        if isGridLine:
            sh = sh.buffer(gridResolution)

        for node in nodes:
            if sh.contains(node[1]): # node.geom
                j.append(node[0]) # node.node_id

    return j if j else None
Example no. 10
    def multipolygon_from_list(self, poly_list):
        
        multipolygon_list = []
        
        exterior = poly_list[0]
        exterior_polygon = Polygon(exterior)

        # Adding all polygons to a list of polygons
        polygons = []
        for i in range(1, len(poly_list)):
            
            # Appending the created polygon to the multipolygon list
            # check if polygon is within exterior outline:
            #     if TRUE add as polygon_coords to polygons
            #     if FALSE add as polygon to multipolygon
            polygon = Polygon(poly_list[i])
            
            if (polygon.within(exterior_polygon)):
                polygons.append(poly_list[i])
            else:
                multipolygon_list.append(polygon)

        polygon = Polygon(exterior, polygons)
        multipolygon_list.insert(0, polygon)

        multipolygon = MultiPolygon(multipolygon_list)
        return multipolygon
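Usage sketch, with obj standing in for a hypothetical instance of the enclosing class: an outer square, one ring inside it (becomes a hole), and one square outside it (becomes a second polygon):

outer = [(0, 0), (10, 0), (10, 10), (0, 10)]
hole = [(2, 2), (4, 2), (4, 4), (2, 4)]
island = [(20, 20), (22, 20), (22, 22), (20, 22)]
mp = obj.multipolygon_from_list([outer, hole, island])  # obj: hypothetical instance
print(len(mp.geoms), mp.geoms[0].area)  # 2 96.0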
Example no. 11
    def pack_shape_scale_linear(self):
        center = self.random_point()
        base = self.base_shapes[0]
        ph = np.random.random() * 2 * np.pi
        R = np.matrix([[np.cos(ph), -np.sin(ph)], [np.sin(ph), np.cos(ph)]])
        rbase = base * R

        # linear search on scale to find best fit
        r = 0
        delta = 2 ** -4
        while True:
            p = Polygon(r * rbase + center)
            intersected = False
            for shape in self.shapes:
                for poly in shape:
                    if p.intersects(poly):
                        intersected = True
                        break
                if intersected:
                    break

            if intersected:
                break
            r += delta

        # back off one step to the last scale that did not intersect
        r = max(r - delta, 0)
        p = Polygon(r * rbase + center)
        print('  %f' % r)

        self.shapes.append(p)
Example no. 12
    def partition(self):
        partition_by_station_dict = dict()
        population_by_station_dict = dict()
        station_coordinates = []
        for station in self.stations.values():
            station_coordinates.append([station.lon, station.lat])
        points = np.array(station_coordinates)
        partitions = Voronoi(points)
        regions, vertices = LoadEstimator.voronoi_finite_polygons_2d(partitions)

        polygons = []
        for region in regions:
            vertices_coordinates = []
            for vertex_index in region:
                vertices_coordinates.append((vertices[vertex_index][0], vertices[vertex_index][1]))
            vertices_coordinates.append(vertices_coordinates[0])  # close the ring
            partition_polygon = Polygon(vertices_coordinates)
            polygons.append(partition_polygon)
            for station in self.stations.values():
                if Point(station.lon, station.lat).within(partition_polygon):
                    partition_by_station_dict[str(station.id)] = partition_polygon.intersection(self.boundary)
                    population_of_region = self.population_of_region(partition_polygon)
                    self.root.info('Region of station %s has %s people', str(station.id), str(population_of_region))
                    population_by_station_dict[str(station.id)] = population_of_region
                    break
        return partition_by_station_dict, population_by_station_dict
Example no. 13
    def get_reachable(self, coord, extra_coords):
        res = []
        for c in [(node.x, node.y) for node in self.nodes] + extra_coords:
            if c == coord:
                continue

            dirvec = (c[0]-coord[0], c[1]-coord[1])
            norm = (dirvec[0]**2 + dirvec[1]**2)**.5
            scl = self.uav_radius / norm
            norvecs = [(v[0]*scl, v[1]*scl) for v in [(-dirvec[1], dirvec[0]), (dirvec[1], -dirvec[0])]]
            corners = [
                (c[0]+norvecs[0][0], c[1]+norvecs[0][1]),
                (c[0]+norvecs[1][0], c[1]+norvecs[1][1]),
                (coord[0]+norvecs[0][0], coord[1]+norvecs[0][1]),
                (coord[0]+norvecs[1][0], coord[1]+norvecs[1][1])
            ]
            path_poly = Polygon(corners)

            # the swept corridor must avoid every obstacle
            canReach = True
            for o in self.obstacles:
                if path_poly.intersects(o):
                    canReach = False
                    break
            if canReach:
                res.append(c)
        return res
Example no. 14
def doPolygonize():
  blocks = polygonize(lines)
  writeBlocks(blocks, args[0] + '-blocks.geojson')

  # polygonize returns a generator, which writeBlocks exhausts; rebuild it
  blocks = polygonize(lines)
  bounds = Polygon([
    [minlng, minlat],
    [minlng, maxlat],
    [maxlng, maxlat],
    [maxlng, minlat],
    [minlng, minlat]
  ])
  # Geometry transform function based on pyproj.transform
  project = partial(
    pyproj.transform,
    pyproj.Proj(init='EPSG:3785'),
    pyproj.Proj(init='EPSG:4326'))
  print(bounds)
  print(transform(project, bounds))

  print('finding holes')
  for index, block in enumerate(blocks):
    if index % 1000 == 0:
      print("diff'd  %s" % index)
    if not block.is_valid:
      print(explain_validity(block))
      print(transform(project, block))
    else:
      bounds = bounds.difference(block)
  print(bounds)
Example no. 15
def compare_location_results(expected, found):
    true_positive = 0
    false_positive = 0
    false_negative = 0

    ambiguous = 0

    paired = [False]*len(found)

    for e in expected:
        p1=Polygon(reshape_list(e))
        total_matched = 0
        for idx,f in enumerate(found):
            p2=Polygon(reshape_list(f))
            try:
                x = p1.intersection(p2)
                if x.area/p1.area > 0.1:
                    paired[idx] = True
                    total_matched += 1
                    true_positive += 1
            except Exception:
                pass  # topology errors from invalid polygons are ignored here
        if total_matched == 0:
            false_negative += 1
        elif total_matched > 1:
            ambiguous += 1

    for idx in range(len(found)):
        if not paired[idx]:
            false_positive += 1

    return {"tp":true_positive,"fp":false_positive,"fn":false_negative,"ambiguous":ambiguous}
Example no. 16
def generate_zones(poly_points, lon_unit, lat_unit, lons, lats):
    poly = Polygon(poly_points)
    zones = {}
    geo_json = {"type": "FeatureCollection", "features": []}
    for i, lon in enumerate(lons):
        for j, lat in enumerate(lats):
            leftBottom, rightBottom = (lon, lat), (lon + lon_unit, lat)
            rightTop, leftTop = (lon + lon_unit, lat + lat_unit), (lon, lat + lat_unit)
            polyPoints_gps = [leftBottom, rightBottom, rightTop, leftTop, leftBottom]
            cCoor_gps = (lon + lon_unit / 2.0, lat + lat_unit / 2.0)
            zone_poly = Polygon(polyPoints_gps)
            if poly.contains(zone_poly):
                boundary_relation = zone.IN
            elif poly.intersects(zone_poly):
                boundary_relation = zone.INTERSECT
            else:
                boundary_relation = zone.OUT
            zones[(i, j)] = zone(boundary_relation, i, j, cCoor_gps, polyPoints_gps)
            feature = {"type":"Feature",
                       "id": '%d#%d' % (i,j),
                       "properties": {"cCoor_gps": cCoor_gps},
                       "geometry":
                           {"type": "Polygon",
                            "coordinates": [[leftBottom,
                                            rightBottom,
                                            rightTop,
                                            leftTop,
                                            leftBottom
                                            ]]
                            }
                      }
            geo_json["features"].append(feature)
    return zones, geo_json
Example no. 17
def parse_modis_coordinates(url_xml, coordinates, verbose):
    upperleft = (float(coordinates.split(',')[0]), float(coordinates.split(',')[1]))  # lat, lon
    downright = (float(coordinates.split(',')[2]), float(coordinates.split(',')[3]))  # lat, lon
    upperright = (upperleft[0], downright[1])
    downleft = (downright[0], upperleft[1])
    requested_bbox = Polygon((upperleft, upperright, downright, downleft))
    if verbose:
        LOG.info("UL: LAT -> %s, LON -> %s" % upperleft)
        LOG.info("DR: LAT -> %s, LON -> %s" % downright)
    req = urllib2.Request("%s" % url_xml, None, HEADERS)
    print(url_xml)
    root = etree.parse(urllib2.urlopen(req))
    bbox = []
    for point in root.xpath('/GranuleMetaDataFile/GranuleURMetaData/SpatialDomainContainer/'
                            'HorizontalSpatialDomainContainer/GPolygon/Boundary/Point'):
        lon = point.xpath('./PointLongitude')
        lat = point.xpath('./PointLatitude')
        bbox.append((float(lat[0].text), float(lon[0].text)))
    product_bbox = MultiPoint(bbox).convex_hull
    if verbose:
        for point in bbox:
            (lat, lon) = point
            LOG.info("Point: LAT -> %s LON -> %s" % (lat, lon))

    if requested_bbox.intersects(product_bbox):
        LOG.info("Compatible")
        return True
    else:
        LOG.info("Not Compatible")
        return False
Example no. 18
def build_polygon(refs):
    coords = []

    for ref in refs:
        coord = coordsDB.get(str(ref))
        if coord:
            coord = [float(c) for c in coord.split(',')]
            coords.append(coord)
        else:
            # for some reason coordinates are missing
            # this is usually because an extract cuts coordinates out
            return False

    if len(coords) > 2:
        # 3 point minimum for polygon
        # avoids common osm problems
        polygon = Polygon(coords)

        if polygon.is_valid:
            return polygon
        else:
            # a zero-width buffer cleans invalid polygons;
            # they're invalid for many reasons, mostly human input errors
            return polygon.buffer(0.0)
    else:
        return False
Example no. 19
def triangulate_area_in_closed_curve(bndry_pts,xmax,xmin,ymax,ymin,N,vector_map):
    polygon = Polygon(bndry_pts)
    interior_pts = []
    dx = (xmax-xmin)/float(N+1)
    dy = (ymax-ymin)/float(N+1)
    x0 = xmin+dx
    y0 = ymin+dy
    for i in range(N):
        x = x0+float(i)*dx+random.uniform(-0.1,0.1)
        for j in range(N):
            y = y0+float(j)*dy+random.uniform(-0.1,0.1)
            pt = Point(x,y)
            if polygon.contains(pt):
                interior_pts.append((x,y))
    surface_pts = bndry_pts+interior_pts
    simplices = core.Triangulation(surface_pts)
    triangles = simplices.get_elements()
    pts = simplices.get_set()
    (u1,u2,u3) = vector_map(pts)
    indices = simplices.get_elements_indices()
    reduced_indices = []
    for index in indices:
        x1 = numpy.array(pts[index[0]])
        x2 = numpy.array(pts[index[1]])
        x3 = numpy.array(pts[index[2]])
        xcg = (x1+x2+x3)/3.0
        x1 = xcg+0.999*(x1-xcg)
        x2 = xcg+0.999*(x2-xcg)
        x3 = xcg+0.999*(x3-xcg)
        tmp = Polygon((x1,x2,x3))
        if polygon.contains(tmp):
            reduced_indices.append(index)
    return(triangular_mesh(u1,u2,u3,reduced_indices,line_width=0.25,tube_radius=0.005,representation='fancymesh'))
Example no. 20
def normalize_footprint(footprint: List[Tuple[float, float]]) -> MultiPolygon:
    """Split footprints which cross the anti-meridian.

    Most applications, including RasterFoundry, cannot handle a single-polygon
    representation of an anti-meridian-crossing footprint.
    Normalizing footprints by splitting them over the anti-meridian fixes cases
    where scenes appear to span all longitudes outside their actual footprint.
    If a footprint covers the anti-meridian, the shape is shifted 360 degrees,
    split, and the split section is then moved back.
    """
    intersects = intersects_antimeridian(footprint)
    if not intersects:
        return MultiPolygon([Polygon(footprint)])
    else:
        shifted_footprint = Polygon([shift_point(p, 0, Side.RIGHT, False, 360) for p in footprint])
        left_hemisphere_mask = Polygon([(0, -90), (180, -90), (180, 90), (0, 90), (0, -90)])
        left_split = shifted_footprint.intersection(left_hemisphere_mask)
        right_split = Polygon([
            shift_point((x, y), 180, Side.LEFT, True, -360)
            for x, y
            in shifted_footprint.difference(left_hemisphere_mask).exterior.coords
        ])
        if left_split.area > 0 and right_split.area > 0:
            return MultiPolygon([left_split, right_split])
        elif left_split.area > 0:
            return MultiPolygon([left_split])
        elif right_split.area > 0:
            return MultiPolygon([right_split])
        else:
            return MultiPolygon([])
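The same splitting idea in a self-contained sketch using only shapely, in place of the shift_point/Side helpers above; the crossing heuristic and the 360-degree shift are assumptions of this sketch:

from shapely.affinity import translate
from shapely.geometry import MultiPolygon, Polygon

def split_antimeridian_sketch(coords):
    # with longitudes in [-180, 180], a jump of more than 180 degrees
    # between consecutive vertices is taken as an anti-meridian crossing
    crosses = any(abs(coords[i][0] - coords[i - 1][0]) > 180
                  for i in range(1, len(coords)))
    if not crosses:
        return MultiPolygon([Polygon(coords)])
    # shift western longitudes east by 360 so the ring is contiguous,
    # cut at the 180-degree meridian, then move the eastern half back
    shifted = Polygon([(x + 360 if x < 0 else x, y) for x, y in coords])
    west = shifted.intersection(Polygon([(0, -90), (180, -90), (180, 90), (0, 90)]))
    east = translate(shifted.intersection(
        Polygon([(180, -90), (360, -90), (360, 90), (180, 90)])), xoff=-360)
    return MultiPolygon([p for p in (west, east) if p.area > 0])

parts = split_antimeridian_sketch([(170, 0), (-170, 0), (-170, 10), (170, 10)])
print(len(parts.geoms))  # 2: one strip on each side of the anti-meridian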
Example no. 21
def tri_generator(n):
    a = random2d()
    b = random2d()
    c = random2d()
    tlist = [(0, 1, 2)]
    plist = [a, b, c]
    trisequence = Polygon(plist)
    id = [1, 2]
    pcount = 2
    
    # initialization of the greedy path
    entrance = a[:]
    entrance, dis = seg_p_dis(entrance, b, c)
    greedy = dis
    elist = [entrance]
    # start routing
    for i in range(n-1):
        pcount = pcount + 1
        new = point_generator(trisequence, plist[id[0]], plist[id[1]])
        tri = Polygon([plist[id[0]], plist[id[1]], new])
        trisequence = trisequence.union(tri)
        tlist.append((id[0], id[1], pcount))
        plist.append(new)
        id[randint(0, 1)] = pcount
        if i < n-2:
            entrance, dis = seg_p_dis(entrance, plist[id[0]], plist[id[1]])
            elist.append(entrance)
        else:
            dis = sum([(entrance[k]-new[k])**2 for k in range(2)])**.5
            elist.append(new)
        greedy = greedy + dis
    return plist, tlist, elist, greedy
Example no. 22
    def test_attribute_chains(self):

        # Attribute Chaining
        # See also ticket #151.
        p = Polygon(((0.0, 0.0), (0.0, 1.0), (-1.0, 1.0), (-1.0, 0.0)))
        self.assertEqual(
            list(p.boundary.coords),
            [(0.0, 0.0), (0.0, 1.0), (-1.0, 1.0), (-1.0, 0.0), (0.0, 0.0)])

        ec = list(Point(0.0, 0.0).buffer(1.0, 1).exterior.coords)
        self.assertIsInstance(ec, list)  # TODO: this is a poor test

        # Test chained access to interiors
        p = Polygon(
            ((0.0, 0.0), (0.0, 1.0), (-1.0, 1.0), (-1.0, 0.0)),
            [((-0.25, 0.25), (-0.25, 0.75), (-0.75, 0.75), (-0.75, 0.25))]
        )
        self.assertEqual(p.area, 0.75)

        """Not so much testing the exact values here, which are the
        responsibility of the geometry engine (GEOS), but that we can get
        chain functions and properties using anonymous references.
        """
        self.assertEqual(
            list(p.interiors[0].coords),
            [(-0.25, 0.25), (-0.25, 0.75), (-0.75, 0.75), (-0.75, 0.25),
             (-0.25, 0.25)])
        xy = list(p.interiors[0].buffer(1).exterior.coords)[0]
        self.assertEqual(len(xy), 2)

        # Test multiple operators, boundary of a buffer
        ec = list(p.buffer(1).boundary.coords)
        self.assertIsInstance(ec, list)  # TODO: this is a poor test
Example no. 23
class Polygon(Geometry):
    """ class for convex polygon intersection test """
    def __init__(self, vertices):
        """ constructor for polygon, vertices must be specified
        in counter-clockwise order """

        # shapely should not be used if the students are implementing this
        self.poly = ShapelyPolygon(vertices)

    def intersects(self, geometry):
        if isinstance(geometry, Collection):
            return geometry.intersects(self)
        elif isinstance(geometry, Point):
            return self.point_poly_test(geometry)
        else:
            return self.poly_poly_test(geometry)

    def point_poly_test(self, p):
        """ This method should be implemented by the students but for
        demo purposes, shapely is used """
        return self.poly.intersects(p.point)

    def poly_poly_test(self, p):
        """ This method should be implemented by the students but for
        demo purposes, shapely is used """
        return self.poly.intersects(p.poly)

    @property
    def vertices(self):
        return list(self.poly.exterior.coords)
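A brief usage sketch; Collection and the Point wrapper are the other classes of this module (assumed available), and ShapelyPolygon is shapely's Polygon imported under an alias:

tri = Polygon([(0, 0), (2, 0), (1, 2)])             # counter-clockwise vertices
square = Polygon([(1, 1), (3, 1), (3, 3), (1, 3)])
print(tri.intersects(square))  # True, via poly_poly_test
print(tri.vertices)            # closed ring: first vertex repeated at the end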
Example no. 24
def box3d_intersection(box_a, box_b):
    """
    A simplified calculation of 3D bounding box intersection.
    It is assumed that the bounding box is only rotated
    around the Z axis (yaw) from an axis-aligned box.
    :param box_a, box_b: obstacle bounding boxes for comparison
    :return: intersection volume (float)
    """
    # height (Z) overlap
    min_h_a = np.min(box_a[2])
    max_h_a = np.max(box_a[2])
    min_h_b = np.min(box_b[2])
    max_h_b = np.max(box_b[2])
    max_of_min = np.max([min_h_a, min_h_b])
    min_of_max = np.min([max_h_a, max_h_b])
    z_intersection = np.max([0, min_of_max - max_of_min])
    if z_intersection == 0:
        return 0.

    # oriented XY overlap
    xy_poly_a = Polygon(list(zip(*box_a[0:2, 0:4])))
    xy_poly_b = Polygon(list(zip(*box_b[0:2, 0:4])))
    xy_intersection = xy_poly_a.intersection(xy_poly_b).area
    if xy_intersection == 0:
        return 0.

    return z_intersection * xy_intersection
Example no. 25
    def _center_pts(pts):
        '''Fancy label position generator, using erosion to get a label coordinate'''
        pmin = pts.min(0)
        pts -= pmin
        pmax = pts.max(0)
        pts /= pmax

        # probably don't need more than 20 points; reduce detail of the polys
        if len(pts) > 20:
            pts = pts[::len(pts)//20]

        try:
            poly = Polygon([tuple(p) for p in pts])
            last_i = 0
            for i in np.linspace(0, 1, 100):
                if poly.buffer(-i).is_empty:
                    return list(poly.buffer(-last_i).centroid.coords)[0] * pmax + pmin
                last_i = i

            print("unable to find zero centroid...")
            return list(poly.buffer(-100).centroid.coords)[0] * pmax + pmin
        except Exception:
            # this is only used for label positions, so a rough fallback is fine
            import warnings
            warnings.warn("Shapely error - computing mean of points instead of geometric center")
            return np.nanmean(pts, 0)
Example no. 26
def replace_lines_to_point(line_lyrs, old_point, new_point):
    x = old_point[0].coords[0][0]
    y = old_point[0].coords[0][1]
    buff = Polygon([(x-1, y-1), (x-1, y+1), (x+1, y+1), (x+1, y-1), (x-1, y-1)])
    buff.srid = old_point.srid

    for line_lyr in line_lyrs:
        # get intersection lines
        query = line_lyr.feature_query()
        #query.intersects(buff)
        query.geom()

        # replace point in lines
        for f in query():
            line = f.geom[0]
            new_line_points = []
            need_reconstruct = False
            for vertex in line.coords:
                if vertex == old_point[0].coords[0]:
                    new_line_points.append(new_point[0].coords[0])
                    need_reconstruct = True
                else:
                    new_line_points.append(vertex)

            if need_reconstruct:
                new_geom = MultiLineString([new_line_points,])
                f.geom = new_geom
                line_lyr.feature_put(f)
Example no. 27
def fix_geometry(geometry):
    """Attempts to fix an invalid geometry (from https://goo.gl/nfivMh)"""
    try:
        return geometry.buffer(0)
    except ValueError:
        pass

    polygons = geom_as_list(geometry)

    fixed_polygons = list()
    for i, polygon in enumerate(polygons):
        if not linear_ring_is_valid(polygon.exterior):
            continue

        interiors = []
        for ring in polygon.interiors:
            if linear_ring_is_valid(ring):
                interiors.append(ring)

        fixed_polygon = Polygon(polygon.exterior, interiors)

        try:
            fixed_polygon = fixed_polygon.buffer(0)
        except ValueError:
            continue

        fixed_polygons.extend(geom_as_list(fixed_polygon))

    if len(fixed_polygons) > 0:
        return MultiPolygon(fixed_polygons)
    else:
        return None
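A sketch of the zero-width buffer trick that fix_geometry tries first, on a self-intersecting "bowtie" ring:

from shapely.geometry import Polygon

bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])
print(bowtie.is_valid)   # False: the ring crosses itself at (1, 1)
fixed = bowtie.buffer(0)
print(fixed.is_valid)    # True, though buffer(0) may keep only part of the area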
Example no. 28
def pg_pix2latlon_strdf(im_name):
    # load the polygon data for each region
    poly_path = './shpres/%s.json'%(im_name)
    if not os.path.isfile(poly_path):
        #print 'no shape files', im_name
        exit()

    #print 'shift', shift_dict[im_name]
    rs, cs = shift_dict[im_name]

    pg = simplejson.load(open(poly_path))
    #print pg

    exter = np.array(pg['ext'])
    exter[:,0] += cs
    exter[:,1] += rs
    ex_lonlat = pix2ll(exter,t)

    inters = pg['intlist']
    
    inter_list = []
    for inter in inters:
        np_inter = np.array(inter)
        np_inter[:,0] += cs
        np_inter[:,1] += rs
        np_inter = pix2ll(np_inter,t)
        inter_list.append(np_inter)
    
    pg_obj = Polygon(ex_lonlat, inter_list)
    print(pg_obj.contains(Point(69.178746, 35.813774)))
Example no. 29
    def __init__(self, robot_name, data_polygons):
        self.robot_name = robot_name
        self.open_area = Polygon()
        self.full_area = Polygon()


        for id_robot, pol_data in data_polygons.items():
            # pol_data = [polygon, closed, time]

            try:
                available = not pol_data[1]
                pol = Polygon(pol_data[0])

                # Invalid polygons cannot be joined.
                if not pol.is_valid:
                    pol = MultiPoint(pol_data[0]).convex_hull

                if available or robot_name == id_robot:

                    self.open_area = self.open_area.union(pol)
                else:
                    self.full_area = self.full_area.union(pol)

            except Exception as err:
                print("Error joining polygons:", err)
Example no. 30
	def setStInShape(self,shpfile):
		"""
		Returns the list of stations that fall inside the polygon(s) from a shapefile
		"""
		import shapefile as shp
		import geocalc
		from shapely.geometry import Polygon,Point
		res=[]
		sf = shp.Reader(shpfile)
		for sp in sf.shapes():
			res_tmp=[]
			lonmin,latmin,lonmax,latmax=sp.bbox
			lonmin,lonmax=geocalc.cLon(lonmin),geocalc.cLon(lonmax)
			if lonmin<0 or lonmax<0:
				polygonPoints=[[geocalc.cLon(cors[0]),cors[1]] for cors in sp.points]
			else:
				polygonPoints=sp.points
			poly=Polygon(polygonPoints)
			indsInBox=[ind for ind in self.stInds if lonmin<=geocalc.cLon(self.stMeta[ind]['lon'])<=lonmax and latmin<=self.stMeta[ind]['lat']<=latmax]
			for ind in indsInBox:
				lat,lon=self.stMeta[ind]['lat'], geocalc.cLon(self.stMeta[ind]['lon'])
				pnt=Point(lon,lat)
				if poly.contains(pnt): res_tmp.append(ind)
			res=res+res_tmp
		return list(set(res))
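The same two-stage filter in miniature: a cheap bounding-box prefilter followed by the exact point-in-polygon test, as a standalone sketch with made-up stations:

from shapely.geometry import Point, Polygon

poly = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)])
lonmin, latmin, lonmax, latmax = poly.bounds
stations = {'a': (1.0, 1.0), 'b': (3.9, 3.9), 'c': (9.0, 9.0)}
in_box = [s for s, (lon, lat) in stations.items()
          if lonmin <= lon <= lonmax and latmin <= lat <= latmax]
inside = [s for s in in_box if poly.contains(Point(stations[s]))]
print(inside)  # ['a', 'b']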
Example no. 31
import math
import random
from typing import List

import numpy as np
from shapely.geometry import Point, Polygon

from election.g6.src.voter import Voter

num_parties = 3
triangle = Polygon([(0, 0), (1000, 0), (500, 500 * math.sqrt(3))])


def is_in_triangle(x: int, y: int):
    return triangle.contains(Point(x, y))


def get_normal(num_voters: int,
               mean_x,
               mean_y,
               std_x,
               std_y,
               seed: int = 1234,
               num_parties: int = 3,
               batch_size: int = 64) -> List[Voter]:
    # @TODO try generating the distribution with numpy's multivariate normal distribution
    np.random.seed(seed)
    voters = []
    while True:
        x_batch = np.random.normal(loc=mean_x,
                                   scale=std_x,
Example no. 32
    elif len(split) > 0 and split[0] == 'LABEL:':
        snag = True
    elif len(split) > 0 and split[0] == 'SIGNIFICANT':
        break
cMax = max(contours)
cMin = min(contours)

polys = {}
for c in coords:
    polys[c] = []
    for i in range(len(coords[c])):
        polys[c].append(Polygon(coords[c][i]))

fig = plt.figure(0)
for c in coords2:
    for i in range(len(coords2[c])):
        plt.plot(coords2[c][i]['x'], coords2[c][i]['y'], '-', color='black')
plt.savefig('test3.png')

#xGrid = np.arange(-102,-88,0.1)
#yGrid = np.arange(30,42,0.1)
xGrid = np.arange(-99, -91, 0.1)
yGrid = np.arange(33, 39, 0.1)
xGrid2 = np.arange(-99, -91, 0.01)
yGrid2 = np.arange(33, 39, 0.01)

x, y = np.meshgrid(xGrid, yGrid)
Example no. 33
def main():
	#-- Read the system arguments listed after the program
	long_options=['DIR=','FILTER=','CLOBBER']
	optlist,arglist = getopt.getopt(sys.argv[1:],'D:F:C',long_options)

	#-- Set default settings
	subdir = 'atrous_32init_drop0.2_customLossR727.dir'
	FILTER = 0.
	flt_str = ''
	clobber = False
	for opt, arg in optlist:
		if opt in ("-D","--DIR"):
			subdir = arg
		elif opt in ("-F","--FILTER"):
			if arg not in ['NONE','none','None','N','n',0]:
				FILTER = float(arg)
				flt_str = '_%.1fkm'%(FILTER/1000)
		elif opt in ("-C","--CLOBBER"):
			clobber = True
			
	
	#-- Get list of files
	pred_dir = os.path.join(ddir,'stitched.dir',subdir)
	fileList = os.listdir(pred_dir)
	pred_list = [f for f in fileList if (f.endswith('.tif') and ('mask' not in f))]
	#-- output directory
	output_dir = os.path.join(pred_dir,'shapefiles.dir')
	#-- make directories if they don't exist
	if not os.path.exists(output_dir):
		os.mkdir(output_dir)
	#-- if CLOBBER is False, we are not overwriting old files, so remove existing files from list
	if not clobber:
		print('Removing existing files.')
		existingList = os.listdir(output_dir)
		existing = [f for f in existingList if (f.endswith('.shp') and ('ERR' not in f) and f.startswith('gl_'))]
		rem_list = []
		for p in pred_list:
			if p.replace('.tif','%s.shp'%flt_str) in existing:
				#-- save index for removing at the end
				rem_list.append(p)
		for p in rem_list:
			print('Ignoring %s.'%p)
			pred_list.remove(p)
	
	# pred_list = ['gl_069_181218-181224-181224-181230_014095-025166-025166-014270_T110614_T110655.tif']
	# pred_list = ['gl_007_180518-180524-180530-180605_021954-011058-022129-011233_T050854_T050855.tif']
	print('# of files: ', len(pred_list))

	#-- threshold for getting contours and centerlines
	eps = 0.3

	#-- loop through prediction files
	#-- get contours and save each as a line in shapefile
	#-- also save training label as line
	for f in pred_list:
		#-- read file
		raster = rasterio.open(os.path.join(pred_dir,f),'r')
		im = raster.read(1)
		#-- get transformation matrix
		trans = raster.transform

		#-- also read the corresponding mask file
		mask_file = os.path.join(pred_dir,f.replace('.tif','_mask.tif'))
		print(mask_file)
		mask_raster = rasterio.open(mask_file,'r')
		mask = mask_raster.read(1)
		mask_raster.close()

		#-- get contours of prediction
		#-- close contour ends to make polygons
		im[np.nonzero(im[:,0] > eps),0] = eps
		im[np.nonzero(im[:,-1] > eps),-1] = eps
		im[0,np.nonzero(im[0,:] > eps)] = eps
		im[-1,np.nonzero(im[-1,:] > eps)] = eps
		contours = skimage.measure.find_contours(im, eps)
		#-- make contours into closed polygons to find pinning points
		#-- also apply noise filter and append to noise list
		x = {}
		y = {}
		noise = []
		pols = [None]*len(contours)
		pol_type = [None]*len(contours)
		for n,contour in enumerate(contours):
			#-- convert to coordinates
			x[n],y[n] = rasterio.transform.xy(trans, contour[:,0], contour[:,1])

			pols[n] = Polygon(list(zip(x[n], y[n])))
			#-- get elements of mask the contour is on
			submask = mask[np.round(contour[:, 0]).astype('int'), np.round(contour[:, 1]).astype('int')]
			#-- if more than half of the elements are from test tile, count contour as test type
			if np.count_nonzero(submask) > submask.size/2.:
				pol_type[n] = 'Test'
			else:
				pol_type[n] = 'Train'
		
		#-- Loop through all the polygons, take any overlapping areas out
		#-- of the enclosing polygon, and ignore the inner polygon
		ignore_list = []
		for i in range(len(pols)):
			for j in range(len(pols)):
				if (i != j) and pols[i].contains(pols[j]):
					pols[i] = pols[i].difference(pols[j])
					ignore_list.append(j)

		#-- loop through and apply noise filter
		for n in range(len(contours)):
			#-- apply filter
			if (n not in ignore_list) and (len(x[n]) < 2 or LineString(list(zip(x[n], y[n]))).length <= FILTER):
				noise.append(n)

		#-- loop through remaining polygons and determine which ones are 
		#-- pinning points based on the width and length of the bounding box
		pin_list = []
		box_ll = [None]*len(contours)
		box_ww = [None]*len(contours)
		for n in range(len(contours)):
			box_ll[n] = pols[n].length
			box_ww[n] = pols[n].area/box_ll[n]
			if (n not in noise) and (n not in ignore_list):
				#-- make bounding box
				# box = pols[n].minimum_rotated_rectangle
				# bx,by = box.exterior.coords.xy
				# #-- get the dimensions of the sides of the box
				# edge_length = (Point(bx[0],by[0]).distance(Point(bx[1],by[1])), Point(bx[1],by[1]).distance(Point(bx[2],by[2])))
				#-- length is the larger dimension
				# box_ll = max(edge_length)
				# #-- width is the smaller dimension
				# box_ww = min(edge_length)
				#-- if the width is more than 1/25 of the length, it's a pinning point
				if box_ww[n] > box_ll[n]/25:
					pin_list.append(n)

		#-- find overlap between ignore list and noise list
		if len(list(set(noise) & set(ignore_list))) != 0:
			sys.exit('Overlap not empty: %s' % str(list(set(noise) & set(ignore_list))))

		#-- initialize list of contour linestrings
		er = [None]*len(contours)
		cn = [] #[None]*(len(contours)-len(ignore_list)-len(noise))
		n = 0  # total center line counter
		pc = 1 # pinning point counter
		lc = 1 # line counter
		er_type = [None]*len(er)
		cn_type = [] #[None]*len(cn)
		er_class = [None]*len(er)
		cn_class = [] #[None]*len(cn)
		er_lbl = [None]*len(er)
		cn_lbl = [] #[None]*len(cn)
		#-- loop through polygons, get centerlines, and save
		for idx,p in enumerate(pols):
			er[idx] = [list(a) for a in zip(x[idx],y[idx])]
			er_type[idx] = pol_type[idx]
			if idx in noise:
				er_class[idx] = 'Noise'				
			elif idx in ignore_list:
				er_class[idx] = 'Inner Contour'
			else:
				if idx in pin_list:
					#-- pinning point. Just get perimeter of polygon
					xc,yc = pols[idx].exterior.coords.xy
					cn.append([[list(a) for a in zip(xc,yc)]])
					cn_class.append(['Pinning Point'])
					cn_type.append([pol_type[idx]])
					#-- set label
					cn_lbl.append(['pin%i'%pc])
					pc += 1 #-- increment pinning point counter
				else:
					#-- get centerlines
					attributes = {"id": idx, "name": "polygon", "valid": True}
					#-- choose interpolation distance based on the polygon perimeter
					dis = pols[idx].length/400
					try:
						cl = Centerline(p, interpolation_distance=dis, **attributes)
					except Exception:
						print('not enough ridges. Skip')
						continue
					else:
						#-- merge all the lines
						merged_lines = linemerge(cl)
						if merged_lines.geom_type == 'LineString':
							#-- save coordinates of linestring
							xc,yc = merged_lines.coords.xy
							cn.append([[list(a) for a in zip(xc,yc)]])
							cn_class.append(['Grounding Line'])
							cn_lbl.append(['line%i'%lc])
							cn_type.append([pol_type[idx]])
							er_class[idx] = 'GL Uncertainty'
							#-- set label
							er_lbl[idx] = 'err%i'%lc
							lc += 1 #-- increment line counter
						else:
							nml = len(merged_lines)
							#-- for lines with many bifurcations, the average segment is 
							#-- about 300m, so if # of segments is length/300 or more, ignore.
							if nml < pols[idx].length/300:
								coord_list = []
								for nn in range(nml):
									xc,yc = merged_lines[nn].coords.xy
									coord_list.append([list(a) for a in zip(xc,yc)])
								cn.append(coord_list)
								cn_class.append(['Grounding Line']*nml)
								cn_lbl.append(['line%i'%lc]*nml)
								cn_type.append([pol_type[idx]]*nml)
								er_class[idx] = 'GL Uncertainty'
								#-- set label
								er_lbl[idx] = 'err%i'%lc
								lc += 1 #-- increment line counter
		
		#-- save all linestrings to file
		#-- make separate files for centerlines and errors
		# 1) GL file
		gl_file = os.path.join(output_dir,f.replace('.tif','%s.shp'%flt_str))
		w = shapefile.Writer(gl_file)
		w.field('ID', 'C')
		w.field('Type','C')
		w.field('Class','C')
		#-- loop over contour centerlines
		for n in range(len(cn)):
			for nn in range(len(cn[n])):
				w.line([cn[n][nn]])
				w.record(cn_lbl[n][nn], cn_type[n][nn], cn_class[n][nn])
		w.close()
		# create the .prj file
		prj = open(gl_file.replace('.shp','.prj'), "w")
		prj.write(raster.crs.to_wkt())
		prj.close()

		# 2) Err File
		er_file = os.path.join(output_dir,f.replace('.tif','%s_ERR.shp'%flt_str))
		w = shapefile.Writer(er_file)
		w.field('ID', 'C')
		w.field('Type','C')
		w.field('Class','C')
		w.field('Length','C')
		w.field('Width','C')
		#-- loop over contours and write them
		for n in range(len(er)):
			w.line([er[n]])
			w.record(er_lbl[n] , er_type[n], er_class[n], box_ll[n], box_ww[n])
		w.close()
		# create the .prj file
		prj = open(er_file.replace('.shp','.prj'), "w")
		prj.write(raster.crs.to_wkt())
		prj.close()

		#-- close input file
		raster.close()
Example no. 34
def find_homographies_per_thread(template: TemplateImage,
                                 test_image: TestImage, ratio_list: RatioList,
                                 homographies: [Homography], discarded_plots):
    good_matches, test_keypoints, template_keypoints = find_keypoints_and_good_matches(
        template, test_image.image)
    if Constants.SAVE:
        save_keypoints_report(good_matches, test_keypoints, test_image.image,
                              template.name)

    # =================================================
    #              FIND THE HOT POINTS
    # =================================================
    hotpoints_image_after_elaboration, hotpoints_image, good_hotpoints = find_hotpoints(
        test_image.image, good_matches, test_keypoints)
    if Constants.SAVE:
        save_hotpoints(hotpoints_image_after_elaboration, template.name)
        save_hotpoints_report(test_image.image, good_matches, test_keypoints,
                              hotpoints_image_after_elaboration, template.name)

    starting_matches = len(good_matches)
    id_homography = 0
    id_hom_global = [0]

    if len(good_hotpoints) <= 0:
        return

    # order good contours depending on the size of area
    good_hotpoints = sorted(good_hotpoints, key=lambda x: x.area, reverse=True)

    # =================================================
    #                 FIRST INSTANCE
    # =================================================

    first_hotpoint = good_hotpoints[0]
    window = first_hotpoint.generate_window()

    homography, good_matches, _, plots = find_homography_double_check(
        test_image.image,
        template,
        good_matches,
        window,
        test_keypoints,
        template_keypoints,
        discarded_plots,
        id_hom_global=id_hom_global,
        id_hotpoint=0,
        id_pos=0,
        id_homography=id_homography,
        ratios_list=ratio_list)
    discarded_plots = plots

    if homography is not None:
        id_homography += 1
        homographies.append(homography)

        # =================================================
        #           COMPUTE AGAIN HOTPOINT
        # =================================================
        # height, width, _ = test_image.get_image().shape  # 1000, 750, 3
        # test_image_polygon = Polygon([(0, 0), (width, 0), (width, height), (0, height)])
        remaining_matches = len(good_matches)
        matches_used = 1 - (remaining_matches / starting_matches)

        # optimize: this could be improved by adding other factors
        if matches_used > Constants.THRESHOLD_HOTPOINT_AGAIN:
            hotpoints_image_after_elaboration, hotpoints_image, good_hotpoints = find_hotpoints(
                test_image.image, good_matches, test_keypoints)

            if len(good_hotpoints) == 0:
                raise StopProcess

            # order good contours depending on the size of area
            good_hotpoints = sorted(good_hotpoints,
                                    key=lambda x: x.area,
                                    reverse=True)

            if Constants.SAVE:
                save_hotpoints(hotpoints_image_after_elaboration,
                               template.name,
                               again=True)

        # =================================================
        #            LOOP OVER ALL THE HOTPOINTS
        # =================================================

        first_obj_width = homographies[0].width
        first_obj_height = homographies[0].height
        for id_hotpoint, hotpoint in enumerate(good_hotpoints):
            # Analyze the neighbourhood of the hotpoint with chessboard 3x3
            max_position = 9

            for position in range(max_position):
                window = hotpoint.generate_window_with_chessboard(
                    width=first_obj_width,
                    height=first_obj_height,
                    position=position,
                    scale=Constants.WINDOW_SCALE)

                searching = True
                while searching:
                    homography, good_matches, _, plots = find_homography_double_check(
                        test_image.image,
                        template,
                        good_matches,
                        window,
                        test_keypoints,
                        template_keypoints,
                        discarded_plots,
                        id_hom_global=id_hom_global,
                        id_hotpoint=id_hotpoint,
                        id_pos=position,
                        id_homography=id_homography,
                        ratios_list=ratio_list)
                    discarded_plots = plots

                    if homography is not None:
                        id_homography += 1
                        homographies.append(homography)

                    else:
                        searching = False

    # =================================================
    #               BIG WINDOW SEARCH
    # =================================================
    height, width, _ = test_image.image.shape  # 1000, 750, 3
    window = Polygon([(0, 0), (width, 0), (width, height), (0, height)])

    searching = True
    while searching:
        homography, good_matches, _, plots = find_homography_double_check(
            test_image.image,
            template,
            good_matches,
            window,
            test_keypoints,
            template_keypoints,
            discarded_plots,
            id_hom_global=id_hom_global,
            id_hotpoint=-1,
            id_pos=-1,
            id_homography=id_homography,
            ratios_list=ratio_list,
            big_window=True)
        discarded_plots = plots

        if homography is not None:
            id_homography += 1
            homographies.append(homography)

        else:
            searching = False

    # =================================================
    #            FIND HOMOGRAPHIES OVERLAP
    # =================================================

    to_removes = remove_overlaps(
        homographies, lambda h1, h2: best_homography(h1, h2, test_image.image))

    for to_remove in to_removes:
        homographies.remove(to_remove)
Example no. 35
import pandas as pd
import uuid
import requests
import urllib.request
import time
from bs4 import BeautifulSoup
from shapely.geometry import Polygon
from subprocess import call

bbox = pd.read_csv("bbox.csv").set_index("Unnamed: 0")
bbox = Polygon(
    bbox.values).buffer(0.1).boundary.minimum_rotated_rectangle.exterior.xy
bbox = ["{}%2C{}%7C".format(round(x, 4), round(y, 4)) for [x, y] in zip(*bbox)]
coords = "".join(bbox)

request_id = "extract_" + str(uuid.uuid4()).split("-")[0]
print("Requesting map {}...".format(request_id))
url = "https://extract.bbbike.org/?lang=en&format={}&city={}&email=fje86316%40aklqo.com&as=0.1&coords={}&oi=1&layers=B000T&submit=extrakt&expire={}&ref=download"
url = url.format("osm.gz", request_id, coords, round(time.time(), 0) + 3600)
print(url)
response = requests.get(url)

if response.ok:
    not_found = True
    max_tries = 30
    tries = 0
    while not_found and tries < max_tries:
        tries += 1
        url = "https://download.bbbike.org/osm/extract/?date=all"
Example no. 36
    def detect_belanger(self,
                        overwrite=False,
                        max_extend=25,
                        thr_u=2.5,
                        thr_curv_vort=1 * 10**(-5)):
        out_file = self._working_dir + str(
            self._identifier) + '_detected_positions.nc'
        if overwrite and os.path.isfile(out_file):
            os.system('rm ' + out_file)
        elif not overwrite and os.path.isfile(out_file):
            self._detected = da.read_nc(out_file)['detected']
            return self._detected

        def group_points(points):
            used_pos = []
            groups = []
            i = 0
            while len(used_pos) < len(points):
                p = points[i]
                i += 1
                if p not in used_pos:
                    used_pos.append(p)
                    group = [p]
                    for p in group:
                        yy, xx = p[0], p[1]
                        candidates = [[yy + ystep, xx + xstep]
                                      for ystep, xstep in permutations(
                                          np.arange(-1, 2, 1), 2)]
                        for pp in candidates:
                            if pp in points and pp not in used_pos:
                                group.append(pp)
                                used_pos.append(pp)

                    groups.append(group)

            return (groups)

        def group_extend(group):
            if len(group) < 2:
                return (0, [[np.nan, np.nan], [np.nan, np.nan]])
            max_square_distance = 0
            for pair in combinations(group, 2):
                dist = np.sqrt(
                    (self._lat[pair[0][0]] - self._lat[pair[1][0]])**2 +
                    (self._lon[pair[0][1]] - self._lon[pair[1][1]])**2)
                if dist > max_square_distance:
                    max_square_distance = dist
                    max_pair = pair
            return (max_square_distance, max_pair)

        # convert distances from degrees into grid-cells
        max_extend = self.degree_to_step(max_extend)

        detect = np.array([[np.nan] * 20])
        print('detecting\n10------50-------100')
        for t, progress in zip(
                self._time_i,
                np.array([['-'] + [''] * (len(self._time_i) // 20 + 1)] *
                         20).flatten()[0:len(self._time_i)]):
            sys.stdout.write(progress)
            sys.stdout.flush()

            asign = np.sign(self._CURV_VORT_ADVECT[t, :, :])
            signchange_y = ((np.roll(asign, 1, axis=0) - asign) !=
                            0).astype(int)
            signchange_x = ((np.roll(asign, 1, axis=1) - asign) !=
                            0).astype(int)
            y, x = np.where((signchange_y + signchange_x > 0)
                            & (self._U[t, :, :] < thr_u)
                            & (self._CURV_VORT[t, :, :] > thr_curv_vort))
            x = x[(5 < self._lat[y]) & (self._lat[y] < 35)]
            y = y[(5 < self._lat[y]) & (self._lat[y] < 35)]

            points = np.vstack((y, x)).T.tolist()
            groups = group_points(points)

            while True:
                done = True
                for group in groups:
                    done = True
                    if group_extend(group)[0] > max_extend:
                        done = False
                        groups.remove(group)
                        thresh = 5 * 10**(-6)
                        while True:
                            thresh *= 1.5
                            for pp in group:
                                if self._CURV_VORT[t, pp[0], pp[1]] < thresh:
                                    group.remove(pp)
                            sub_groups = group_points(group)
                            stop = True
                            for sub in sub_groups:
                                if group_extend(sub)[0] > max_extend:
                                    group = sub
                                    stop = False
                                else:
                                    groups.append(sub)
                            if stop:
                                break
                if done:
                    break

            for group in groups:
                if len(group) > 3:
                    dist, pair = group_extend(group)
                    x = [p[0] for p in group]
                    y = [p[1] for p in group]
                    centr = [sum(x) // len(group), sum(y) // len(group)]  # integer grid indices
                    max_vort = np.max([
                        self._CURV_VORT[t, int(pp[0]),
                                        int(pp[1])] for pp in group
                    ])
                    #tmp=[t,np.median(np.array(group)[:,0]),np.median(np.array(group)[:,1])]+pair[0]+pair[1]
                    tmp = [t] + centr + pair[0] + pair[1] + [
                        len(group), max_vort, self._U[t, centr[0], centr[1]]
                    ]
                    # save a simplified polygon
                    try:
                        hull = ConvexHull(group)
                        y, x = Polygon(np.array(group)[
                            hull.vertices, :]).simplify(1).exterior.xy
                        detect = np.concatenate(
                            (detect, np.array([tmp + list(y) + list(x)])))
                    except Exception:
                        detect = np.concatenate(
                            (detect, np.array([tmp + [np.nan] * 10])))

        self._detected = da.DimArray(np.array(detect[1:, :]),
                                     axes=[
                                         range(detect.shape[0] - 1),
                                         [
                                             't', 'y', 'x', 'y_ext1', 'x_ext1',
                                             'y_ext2', 'x_ext2', 'members',
                                             'max_CURV_VORT', 'u_centroid',
                                             'y1', 'y2', 'y3', 'y4', 'y5',
                                             'x1', 'x2', 'x3', 'x4', 'x5'
                                         ]
                                     ],
                                     dims=['ID', 'z'])
        da.Dataset({'detected': self._detected}).write_nc(out_file, mode='w')
        print('\ndone')
        return self._detected
Esempio n. 37
0
###############################################################################
''' START MAIN CODE HERE '''

###############################################################################
# parse Domains shp and prep data
###############################################################################

domshp = 'ARUP_Background_NSHA18_Merged.shp'

print('Reading source shapefile...')
sf = shapefile.Reader(domshp)
shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

# get field data
src_codes = get_field_data(sf, 'CODE', 'str')
src_names1 = get_field_data(sf, 'Name', 'str')
src_names2 = get_field_data(sf, 'SRC_NAME', 'str')
domains = get_field_data(sf, 'DOMAIN', 'float')
mmax1 = get_field_data(sf, 'max_mag', 'float')
mmax2 = get_field_data(sf, 'MMAX_BEST', 'float')
trt = get_field_data(sf, 'TRT', 'str')
usd = get_field_data(sf, 'usd', 'float')
lsd = get_field_data(sf, 'lsd', 'float')
hd = get_field_data(sf, 'hd1', 'float')
stk = get_field_data(sf, 'strike1', 'float')
dip = get_field_data(sf, 'dip1', 'float')
rke = get_field_data(sf, 'rake1', 'float')
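
# get_field_data is not defined in this snippet; a plausible sketch, assuming
# it extracts one named attribute column from every record of a pyshp Reader
# (the field lookup and dtype handling below are assumptions):
def get_field_data(sf, field, dtype):
    names = [f[0] for f in sf.fields[1:]]  # skip the DeletionFlag field
    idx = names.index(field)
    cast = {'str': str, 'float': float}[dtype]
    data = []
    for rec in sf.records():
        val = rec[idx]
        data.append(cast(val) if val not in ('', None) else None)
    return data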
Esempio n. 38
0
def make_bin_gdf_from_rupture_gdf(
    rupture_gdf: gpd.GeoDataFrame,
    h3_res: int = 3,
    parallel: bool = True,
    n_procs: Optional[int] = None,
    min_mag: Optional[float] = 6.0,
    max_mag: Optional[float] = 9.0,
    bin_width: Optional[float] = 0.2,
) -> gpd.GeoDataFrame:
    """
    Takes all of the ruptures, finds the `h3` spatial bins for each, and then
    makes a new `GeoDataFrame` of all of the spatial bins, with
    :class:`~openquake.hme.utils.bins.SpacemagBin` initialized for each of the
    bins.

    :param rupture_gdf:
        `GeoDataFrame` with all of the ruptures.

    :param h3_res:
        Resolution for the `h3` bins (spatial cells). See
        https://uber.github.io/h3/#/ for more information. Defaults to 3,
        which seems good for earthquake analysis. Larger numbers correspond
        to smaller grid cells.

    :param parallel:
        Boolean flag determining whether to find the spatial indices of the
        ruptures in parallel or not. The parallel algorithm has a lot of
        overhead and will be slower when the number of ruptures is relatively
        low (up to hundreds of thousands or so, depending on the computer).

    :param n_procs:
        Number of simultaneous processes run if `parallel` is `True`. Defaults
        to os.cpu_count() - 1 for multicore systems.

    :param min_mag:
        Minimum magnitude of the :class:`~openquake.hme.utils.bins.SpacemagBin`

    :param max_mag:
        Maximum magnitude of the :class:`~openquake.hme.utils.bins.SpacemagBin`

    :param bin_width:
        Width of the :class:`~openquake.hme.utils.bins.SpacemagBin` bins in
        magnitude units.

    """

    n_procs = n_procs or _n_procs

    logging.info("starting rupture-bin spatial join")

    if (parallel is False) or (n_procs == 1):
        rupture_gdf["bin_id"] = list(
            tqdm(
                map(
                    partial(_h3_bin_from_rupture, h3_res=h3_res),
                    rupture_gdf["rupture"].values,
                ),
                total=rupture_gdf.shape[0],
            ))
    else:
        with Pool(n_procs) as pool:
            rupture_gdf["bin_id"] = tqdm(
                pool.imap(
                    partial(_h3_bin_from_rupture, h3_res=h3_res),
                    rupture_gdf["rupture"].values,
                    chunksize=500,
                ),
                total=rupture_gdf.shape[0],
            )

    logging.info("finished rupture-bin spatial join")

    hex_codes = list(set(rupture_gdf["bin_id"].values))

    polies = [
        Polygon(h3.h3_to_geo_boundary(hex_code, geo_json=True))
        for hex_code in hex_codes
    ]

    bin_gdf = gpd.GeoDataFrame(index=hex_codes,
                               geometry=polies,
                               crs={
                                   "init": "epsg:4326",
                                   "no_defs": True
                               })
    bin_gdf = make_SpacemagBins_from_bin_gdf(bin_gdf,
                                             min_mag=min_mag,
                                             max_mag=max_mag,
                                             bin_width=bin_width)
    return bin_gdf
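
# A minimal sketch of driving the function above; building `rupture_gdf`
# (e.g. from an OpenQuake source model) is assumed to have happened already:
bin_gdf = make_bin_gdf_from_rupture_gdf(
    rupture_gdf,
    h3_res=3,        # coarse hexagons; larger values mean smaller cells
    parallel=False,  # serial is cheaper for small rupture sets
    min_mag=6.0,
    max_mag=9.0,
    bin_width=0.2,
)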
Esempio n. 39
0
 def world_polygon(self):
     return Polygon([(-180, 90), (180, 90), (180, -90), (-180, -90),
                     (-180, 90)])
Esempio n. 40
0
with open(cleanedBIMFile_w_coord_Name, 'r') as addrfile, open(
        resultBIMFileName, 'w+', newline='') as resultBIMFile, open(
            RegionBoundaryFileName) as RegionBoundaryFile, open(
                BuildingFootPrintsFileName) as BuildingFootPrintsFile:

    addrcsv = list(csv.reader(addrfile))
    colNames = addrcsv[0][0:]
    addrcsv = addrcsv[1:]
    coordsAll = [[row[1], row[0]] for row in addrcsv]
    coordsAll = np.array(coordsAll, dtype=np.float32)
    kdTree = spatial.KDTree(coordsAll)

    boundjsn = json.load(RegionBoundaryFile)
    pts = boundjsn["features"][0]['geometry']['coordinates'][0]
    boundary = Polygon(pts)

    fps = []
    bldgFootPrints = json.load(BuildingFootPrintsFile)
    bldgFootPrintsFeatures = bldgFootPrints["features"]
    bldgFootPrintsFeatures = bldgFootPrintsFeatures[0:]
    features = []
    for bfpf in bldgFootPrintsFeatures:
        #print(bfpf)
        pts = bfpf['geometry']['coordinates'][0]
        #fps.append(Polygon(pts))
        bimIndex = getBIMIndex(pts)
        info = addrcsv[bimIndex]
        for i in range(len(colNames)):
            vName = colNames[i]
            if info[i] == 'None' or info[i] == '':
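
# getBIMIndex is referenced above but not shown; a plausible sketch, assuming
# it returns the index of the BIM record closest to the footprint centroid
# using the KD-tree built earlier (helper name and behaviour are assumptions):
def getBIMIndex(pts):
    centroid = np.mean(np.array(pts, dtype=np.float32), axis=0)
    _, idx = kdTree.query(centroid)  # scipy KDTree returns (distance, index)
    return idx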
Esempio n. 41
0
def evaluate(annot_load,
             dects_load,
             annot_filter=None,
             confidence_thresh=None,
             iou_thresh=0.5):
    '''
    The matching logic is flawed; see the warning inside the loop below.
    TODO: add an _assignment flag to skip already-matched predictions
    '''
    print('Start Evaluations ......')
    true_dict, gt_pos = annot_load
    pred_dict, pr_pos = dects_load

    true_dict, gt_pos = confidence_filter(true_dict, annot_filter)
    pred_dict, pr_pos = confidence_filter(pred_dict, confidence_thresh)

    tp = 0
    img_list = list(true_dict.keys())
    img_list.sort()
    for nameID in img_list:
        # print("Processing image: " + nameID)
        # print("  >>> true_dict", true_dict[nameID])
        # print("  >>> pred_dict", pred_dict[nameID])

        ### skip images that have no predictions
        if nameID not in pred_dict:
            continue

        # _true_dict = []
        # for true_obj in true_dict[nameID]:
        #     if len(true_obj) < 3:
        #         pr_pos -= 1
        #         continue
        #     else:
        #         _true_dict.append(true_obj)
        # if len(_true_dict) == 0:
        #     continue

        pred_polys = [Polygon(pred_obj) for pred_obj in pred_dict[nameID]]
        true_polys = [Polygon(true_obj) for true_obj in true_dict[nameID]]
        '''
        Warning:    the pred/true matching logic is flawed: without an
                    _assignment flag marking predictions that already matched
                    a ground-truth polygon, one prediction can be counted for
                    several ground-truth boxes, so precision, recall and the
                    F-score are all over-estimated.
        '''
        for true_poly in true_polys:
            for pred_poly in pred_polys:
                iou = 0
                try:
                    inter = true_poly.intersection(pred_poly).area
                    union = true_poly.area + pred_poly.area - inter
                    iou = inter / union
                except Exception:
                    # shapely raises a TopologicalError on invalid geometries
                    print("Topological Error! IOU set to 0")
                    iou = 0
                if iou > iou_thresh:
                    tp += 1
                    break

    p = 1.0 * tp / pr_pos
    r = 1.0 * tp / gt_pos
    f = 2.0 * p * r / (p + r) if (p + r) > 0 else 0.0  # guard against 0/0

    print("#GTbox:{}\t#Dectbox:{}\t#hits:{}".format(gt_pos, pr_pos, tp))
    print("Precision: \t %.3f" % p)
    print("Recall: \t %.3f" % r)
    print("F-Score:  \t %.3f" % f)
Esempio n. 42
0
def process_maps(contacts_shp,
                 outpath,
                 datafolder,
                 sidx_fname,
                 boundaries_shp,
                 imt_str,
                 inland_shp,
                 models_list=None,
                 only_buffers=False):
    """
    This function processes all the models listed in the `mosaic.DATA`
    dictionary. For each model in contact with other models, it creates a
    file with the points outside of the buffer area.

    :param str contacts_shp:
        The shapefile containing the contacts between models
    :param str outpath:
        The folder where results are stored
    :param str datafolder:
        The path to the folder containing the mosaic data
    :param str sidx_fname:
        The name of the file containing the rtree spatial index
    :param str boundaries_shp:
        The name of the shapefile containing the polygons of the countries
    :param str imt_str:
        The string defining the IMT used for the homogenisation of results
    :param str inland_shp:
        The name of the shapefile defining inland areas
    :param list models_list:
        [optional] A list of model IDs
    """
    shapely.speedups.enable()
    #
    # Checking output directory
    if os.path.exists(outpath):
        lst = glob.glob(os.path.join(outpath, '*.json'))
        lst += glob.glob(os.path.join(outpath, '*.txt'))
        if len(lst):
            raise ValueError('The code requires an empty folder')
    else:
        os.mkdir(outpath)
    # Read the shapefile with the contacts between models
    contacts_df = gpd.read_file(contacts_shp)
    # Read the shapefile with inland areas
    inland_df = gpd.read_file(inland_shp)
    # Load the spatial index
    sidx = index.Rtree(sidx_fname)
    #
    # Get the list of the models from the data folder
    if models_list is None:
        models_list = []
        for key in mosaic.DATA.keys():
            models_list.append(re.sub('[0-9]+', '', key))
    #
    # Loop over the various models
    buf = 0.6
    header_save = None
    imts_save = None
    for i, key in enumerate(sorted(mosaic.DATA)):

        buffer_data = {}
        buffer_poes = {}
        coords = {}

        # Skip models not included in the list
        if re.sub('[0-9]+', '', key) not in models_list:
            continue
        # Find name of the file with hazard curves
        print_model_info(i, key)
        data_fname = find_hazard_curve_file(datafolder, key, imt_str)
        # Read hazard curves
        map_gdf, header = get_hcurves_geodataframe(data_fname[0])
        # Check the stability of information used. TODO we should also check
        # that the IMTs are always the same
        if header_save is None:
            header_save = header
        else:
            for obtained, expected in zip(header, header_save):
                # print(obtained, expected)
                # assert obtained == expected
                pass
        # Create the list of column names with hazard curve data. These are
        # the IMLs
        poelabs = [lab for lab in map_gdf.columns.tolist()
                   if re.search('^poe', lab)]
        imts = get_imtls(poelabs)
        if len(poelabs) < 1:
            raise ValueError('Empty list of column headers')
        # Check the IMLs used
        if imts_save is None:
            imts_save = imts
        else:
            np.testing.assert_allclose(imts_save, imts, rtol=1e-5)
        # Fixing an issue at the border between waf and ssa
        # TODO can we remove this now?
        if key in ['waf18', 'ssa18']:
            from shapely.geometry import Polygon
            coo = get_poly_from_str(mosaic.SUBSETS[key]['AO'][0])
            df = pd.DataFrame({'name': ['tmp'], 'geo': [Polygon(coo)]})
            dft = gpd.GeoDataFrame(df, geometry='geo')
            idx = map_gdf.geometry.intersects(dft.geometry[0])
            xdf = copy.deepcopy(map_gdf[idx])
            map_gdf = xdf
        # Read the shapefile with the polygons of countries. The explode
        # function converts multipolygons into a single multipolygon.
        tmpdf = gpd.read_file(boundaries_shp)
        inpt = explode(tmpdf)
        inpt['MODEL'] = key
        # Select polygons composing the given model and merge them into a
        # single multipolygon.
        selection = create_query(inpt, 'FIPS_CNTRY', mosaic.DATA[key])
        one_polygon = selection.dissolve(by='MODEL')
        # Now we process the polygons composing the selected model
        for poly in one_polygon.geometry:
            # Checking the contacts between the current model and the
            # surrounding ones as specified in the contacts_df geodataframe
            c = 0
            for la, lb, geo in zip(contacts_df.modelA, contacts_df.modelB,
                                   contacts_df.geometry):
                if key.upper() in [la, lb]:
                    print('    ', la, lb)
                    # Index of the points in the buffer. The buffer
                    # includes the country boundary + buffer distance.
                    # map_gdf is a dataframe with the hazard data.
                    idx = map_gdf.geometry.intersects(geo.buffer(buf))
                    # Key defining the second model
                    other = lb
                    if key.upper() == lb:
                        other = la
                    # Create the polygon covering the second model
                    selection = create_query(inpt, 'FIPS_CNTRY',
                                             mosaic.DATA[other.lower()])
                    other_polygon = selection.dissolve(by='MODEL')
                    if not len(other_polygon):
                        raise ValueError('Empty dataframe')
                    # Create a dataframe with just the points in the buffer
                    # and save the distance of each point from the border
                    tmpdf = copy.deepcopy(map_gdf[idx])
                    tmpdf.crs = {'init': 'epsg:4326'}
                    tmpdf = gpd.sjoin(tmpdf,
                                      inland_df,
                                      how='inner',
                                      op='intersects')
                    dst = tmpdf.distance(geo)
                    tmpdf = tmpdf.assign(distance=dst)
                    # Select the points contained in the buffer and belonging
                    # to the other model. These points are labelled.
                    g = other_polygon.geometry[0]
                    idx_other = tmpdf.geometry.intersects(g)
                    tmpdf = tmpdf.assign(outside=idx_other)
                    tmpdf.outside = tmpdf.outside.astype(int)
                    # Update the polygon containing just internal points i.e.
                    # points within the model but outside of the buffers. The
                    # points in the buffer but outside the model are True.
                    poly = poly.difference(geo.buffer(buf))
                    # Write the data in the buffer between the two models
                    fname = 'buf{:d}_{:s}.json'.format(c, key)
                    fname = os.path.join(outpath, fname)
                    if len(tmpdf):
                        tmpdf.to_file(fname, driver='GeoJSON')
                    else:
                        warnings.warn('Empty dataframe', RuntimeWarning)
                    # Update the counter of the points in the buffer and
                    # store hazard curves and their position (i.e. inside
                    # or outside the polygon of a model)
                    c += 1
                    for iii, (p, d, o) in enumerate(
                            zip(tmpdf.geometry, tmpdf['distance'],
                                tmpdf['outside'])):
                        # pidx = tmpdf.index.values[iii]
                        # get only poes for the various IMLs
                        tmp = tmpdf[poelabs]
                        poe = tmp.iloc[iii].values
                        # Using rtree we find the closest point on the
                        # reference grid. Check that there is a single index.
                        res = list(sidx.nearest((p.x, p.y, p.x, p.y), 1))
                        if len(res) > 1:
                            msg = 'The number of indexes found is larger '
                            msg += 'than 1'
                            print('Indexes:', res)
                            raise ValueError(msg)
                        # Update the information for the reference point
                        # found. The buffer_data dictionary contains
                        # distance and position information of the point
                        # in the buffer
                        if res[0] in buffer_data:
                            buffer_data[res[0]].append([d, o])
                            buffer_poes[res[0]].append(poe)
                        else:
                            buffer_data[res[0]] = [[d, o]]
                            buffer_poes[res[0]] = [poe]
                            coords[res[0]] = [p.x, p.y]
            # idx is a series of booleans
            if not only_buffers:
                df = pd.DataFrame({'Name': [key], 'Polygon': [poly]})
                gdf = gpd.GeoDataFrame(df, geometry='Polygon')
                within = gpd.sjoin(map_gdf, gdf, op='within')
                fname = os.path.join(outpath, 'map_{:s}.json'.format(key))
                within.to_file(fname, driver='GeoJSON')
        #
        # Storing temporary files
        tmpdir = os.path.join(outpath, 'temp')
        if not os.path.exists(tmpdir):
            os.mkdir(tmpdir)
        #
        fname = os.path.join(tmpdir, '{:s}_data.pkl'.format(key))
        with open(fname, "wb") as fou:
            pickle.dump(buffer_data, fou)
        #
        fname = os.path.join(tmpdir, '{:s}_poes.pkl'.format(key))
        with open(fname, "wb") as fou:
            pickle.dump(buffer_poes, fou)
        #
        fname = os.path.join(tmpdir, '{:s}_coor.pkl'.format(key))
        with open(fname, "wb") as fou:
            pickle.dump(coords, fou)

    buffer_processing(outpath, datafolder, sidx_fname, imt_str, models_list,
                      poelabs, buf)
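
# Hypothetical invocation of process_maps; every path and model ID below is a
# placeholder and must point at the actual mosaic data on disk:
process_maps(contacts_shp='contacts.shp',
             outpath='out',
             datafolder='mosaic_data',
             sidx_fname='grid_sidx',
             boundaries_shp='countries.shp',
             imt_str='PGA',
             inland_shp='inland.shp',
             models_list=['eur', 'mie'])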
Esempio n. 43
0
def getMaskFromOSM(city='Berlin', storage=''):
    """
        Creates a mask for the Preprocessing from OSM data
    
        input:
            city: city for which the mask should be created
            storage: mask storage folder
            
        output:
            mask: created OSM mask
    """

    # Coordinates of the bounding boxes
    if city == 'Berlin':
        yMin = 52.359
        yMax = 52.854
        xMin = 13.189
        xMax = 13.625
    elif city == 'Moscow':
        yMin = 55.506
        yMax = 55.942
        xMin = 37.357
        xMax = 37.852
    elif city == 'Istanbul':
        yMin = 40.810
        yMax = 41.305
        xMin = 28.794
        xMax = 29.230
    else:
        raise ValueError('Unknown city: ' + city)

    # Load street data
    G = ox.graph_from_bbox(yMax,
                           yMin,
                           xMax,
                           xMin,
                           network_type='drive',
                           truncate_by_edge=True)

    # Convert street data to GeoDataFrame
    nodes, edges = ox.graph_to_gdfs(G)

    # Size of the grid cells
    length = 0.001

    # Bounding boxes of the grid cells
    cols = np.arange(xMin, xMax, length)
    rows = np.arange(yMin, yMax, length)

    polygons = []
    xList = []
    yList = []

    # Build grid cells representing the pixels of the traffic4cast images
    for x in cols:
        for y in rows:
            polygons.append(
                Polygon([(x, y), (x + length, y), (x + length, y + length),
                         (x, y + length)]))
            xList.append(int(np.round(1000 * (x - xMin))))
            yList.append(int(np.round(1000 * (y - yMin))))

    grid = gpd.GeoDataFrame({'geometry': polygons, 'x': xList, 'y': yList})

    edges.crs = grid.crs

    # intersect road data with grid
    joined = gpd.sjoin(edges, grid, op='intersects')

    mask = np.zeros((495, 436))

    # Build a mask from intersections (+ rotate data to fit desired output)
    if city == 'Moscow':
        for idx, row in joined.iterrows():
            mask[row.x, row.y] = 1
        mask = np.flip(mask)
    else:
        for idx, row in joined.iterrows():
            mask[row.y, row.x] = 1
        mask = np.flip(mask, 0)

    mask = (mask > 0)

    #Save mask
    path = os.path.join(storage, city + '.mask')
    pickle.dump(mask, open(path, 'wb'))

    return mask
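
# Example call (assumes osmnx can reach the Overpass API and that the
# `masks` folder exists):
berlin_mask = getMaskFromOSM(city='Berlin', storage='masks')
print(berlin_mask.shape, berlin_mask.sum())  # (495, 436) and the street pixel count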
Esempio n. 44
0
def test_init():
    record = GeometryRecord("AenderungMitVorwirkung",
                            datetime.date(1985, 8,
                                          29), None, Polygon(), 'test')
    assert isinstance(record.law_status, str)
    assert isinstance(record.published_from, datetime.date)
    assert isinstance(record.geo_metadata, str)
    assert isinstance(record.geom, Polygon)
    assert record.public_law_restriction is None


@pytest.mark.parametrize(
    'geom,dim', [(Point(0, 0), 0), (MultiPoint([Point(0, 0)]), 0),
                 (LineString([(0, 0), (1, 1)]), 1),
                 (MultiLineString([LineString([(0, 0), (1, 1)])]), 1),
                 (Polygon([(0, 0), (1, 1), (1, 0)]), 2),
                 (MultiPolygon([Polygon([(0, 0), (1, 1), (1, 0)])]), 2),
                 (GeometryCollection([Polygon([(0, 0), (1, 1), (1, 0)])]), 3),
                 ('nogeom', -1)])
def test_geom_dim(geom, dim):
    assert GeometryRecord.geom_dim(geom) == dim


@pytest.mark.parametrize(
    'input_geom,result,extracted',
    [(Point(0, 0), Point(0, 0), Point(0, 0)),
     (Point(0, 0),
      GeometryCollection([
          Point(0, 0),
          LineString([(0, 0), (1, 1)]),
          Polygon([(0, 0), (1, 1), (1, 0)])
Esempio n. 45
0
def blocks(cells, streets, buildings, id_name, unique_id):
    """
    Generate blocks based on buildings, tessellation and street network

    Adds bID to buildings and tesselation.

    Parameters
    ----------
    cells : GeoDataFrame
        GeoDataFrame containing morphological tessellation
    streets : GeoDataFrame
        GeoDataFrame containing street network
    buildings : GeoDataFrame
        GeoDataFrame containing buildings
    id_name : str
        name of the unique blocks id column to be generated
    unique_id : str
        name of the column with unique id. If there is none, it could be
        generated by unique_id(). This should be the same for cells and
        buildings; IDs should match.

    Returns
    -------
    buildings, cells, blocks : tuple

    buildings : GeoDataFrame
        GeoDataFrame containing buildings with added block ID
    cells : GeoDataFrame
        GeoDataFrame containing morphological tessellation with added block ID
    blocks : GeoDataFrame
        GeoDataFrame containing generated blocks
    """

    cells_copy = cells.copy()

    print('Buffering streets...')
    street_buff = streets.copy()
    street_buff['geometry'] = streets.buffer(0.1)

    print('Generating spatial index...')
    streets_index = street_buff.sindex

    print('Difference...')
    cells_geom = cells_copy.geometry
    new_geom = []

    for ix, cell in tqdm(cells_geom.iteritems(), total=cells_geom.shape[0]):
        # find approximate matches with r-tree, then precise matches from those approximate ones
        possible_matches_index = list(streets_index.intersection(cell.bounds))
        possible_matches = street_buff.iloc[possible_matches_index]
        new_geom.append(cell.difference(possible_matches.geometry.unary_union))

    single_geom = []
    print('Defining adjacency...')
    for p in new_geom:
        if p.type == 'MultiPolygon':
            for polygon in p:
                single_geom.append(polygon)
        else:
            single_geom.append(p)

    blocks_gdf = gpd.GeoDataFrame(geometry=gpd.GeoSeries(single_geom))
    spatial_weights = Queen.from_dataframe(blocks_gdf, silence_warnings=True)

    patches = {}
    jID = 1
    for idx, row in tqdm(blocks_gdf.iterrows(), total=blocks_gdf.shape[0]):

        # if the id is already present in courtyards, continue (avoid repetition)
        if idx in patches:
            continue
        else:
            to_join = [idx]  # list of indices which should be joined together
            neighbours = []  # list of neighbours
            weights = spatial_weights.neighbors[
                idx]  # neighbours from spatial weights
            for w in weights:
                neighbours.append(w)  # make a list from weights

            for n in neighbours:
                while n not in to_join:  # until there is some neighbour which is not in to_join
                    to_join.append(n)
                    weights = spatial_weights.neighbors[n]
                    for w in weights:
                        neighbours.append(
                            w
                        )  # extend neighbours by neighbours of neighbours :)
            for b in to_join:
                patches[b] = jID  # fill dict with values
            jID = jID + 1

    blocks_gdf['patch'] = blocks_gdf.index.map(patches)

    print('Defining street-based blocks...')
    blocks_single = blocks_gdf.dissolve(by='patch')

    blocks_single['geometry'] = blocks_single.buffer(0.1)

    print('Defining block ID...')  # street based
    blocks_single[id_name] = None
    blocks_single[id_name] = blocks_single[id_name].astype('float')
    b_id = 1
    for idx, row in tqdm(blocks_single.iterrows(),
                         total=blocks_single.shape[0]):
        blocks_single.loc[idx, id_name] = b_id
        b_id = b_id + 1

    print('Generating centroids...')
    buildings_c = buildings.copy()
    buildings_c['geometry'] = buildings_c.representative_point(
    )  # representative points (always inside the polygon) act as centroids
    blocks_single.crs = buildings.crs

    print('Spatial join...')
    centroids_tempID = gpd.sjoin(buildings_c,
                                 blocks_single,
                                 how='left',
                                 op='intersects')

    tempID_to_uID = centroids_tempID[[unique_id, id_name]]

    print('Attribute join (tessellation)...')
    cells_copy = cells_copy.merge(tempID_to_uID, on=unique_id)

    print('Generating blocks...')
    blocks = cells_copy.dissolve(by=id_name)
    cells_copy = cells_copy.drop([id_name], axis=1)

    print('Multipart to singlepart...')
    blocks = multi2single(blocks)

    blocks['geometry'] = blocks.exterior

    uid = 1
    for idx, row in tqdm(blocks.iterrows(), total=blocks.shape[0]):
        blocks.loc[idx, id_name] = uid
        uid = uid + 1
        blocks.loc[idx, 'geometry'] = Polygon(row['geometry'])

    # if polygon is within another one, delete it
    sindex = blocks.sindex
    for idx, row in tqdm(blocks.iterrows(), total=blocks.shape[0]):
        possible_matches = list(sindex.intersection(row.geometry.bounds))
        possible_matches.remove(idx)
        possible = blocks.iloc[possible_matches]

        for idx2, row2 in possible.iterrows():
            if row['geometry'].within(row2['geometry']):
                blocks.loc[idx, 'delete'] = 1

    if 'delete' in blocks.columns:
        blocks = blocks.drop(list(blocks.loc[blocks['delete'] == 1].index))

    blocks_save = blocks[[id_name, 'geometry']]

    centroids_w_bl_ID2 = gpd.sjoin(buildings_c,
                                   blocks_save,
                                   how='left',
                                   op='intersects')
    bl_ID_to_uID = centroids_w_bl_ID2[[unique_id, id_name]]

    print('Attribute join (buildings)...')
    buildings = buildings.merge(bl_ID_to_uID, on=unique_id)

    print('Attribute join (tessellation)...')
    cells = cells.merge(bl_ID_to_uID, on=unique_id)

    print('Done')
    return (buildings, cells, blocks_save)
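
# Hypothetical usage of blocks(); the three GeoDataFrames and the 'uID'
# column are assumed to exist as described in the docstring:
buildings, cells, blocks_gdf = blocks(cells, streets, buildings,
                                      id_name='bID', unique_id='uID')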
Esempio n. 46
0
 def test_polygons(self):
     t1 = Polygon([(0, 0), (1, 0), (1, 1)])
     t2 = Polygon([(0, 0), (1, 1), (0, 1)])
     sq = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
     s = GeoSeries([t1, t2, sq])
     assert s.sindex.size == 3
Esempio n. 47
0
# Sets the position of the observer. Is later used to compute the satellite's position in the sky
obs = Topos(lat, lon, elevation_m=obsalt)
print('observer is located at :', obs)

#process the TLE/3LE
tlefilename = '3le.txt'
list_of_satellites = processTLE(tlefilename)
regionfilename = str(outputfolder) + '/tracks.reg'  # ds9 region output
initregionfile()  # create region file, fill region file header
initoutput()  # create output file

#create some geometrical elements with Shapely
listofpts = [(ra_max, dec_min), (ra_max, dec_max), (ra_min, dec_max),
             (ra_min, dec_min)]
area = Polygon(listofpts)  #   creates a rectangle for the region observed

diag1 = LineString([listofpts[3], listofpts[1]])
diag2 = LineString([listofpts[0], listofpts[2]])
# compute the center of the area; not used later in the code but could be useful
center = diag1.intersection(diag2)

# main loop, iterate on all satellites
for s in range(len(list_of_satellites)):

    # skip all the debris; remove this check if you want to include them
    if 'DEB' in list_of_satellites[s].name:
        continue

    inter, dist, alt = detectsat(list_of_satellites[s], 2, y, m, d, h, minstop,
Esempio n. 48
0
def getStreetClusters(city='Berlin', storage='', k=4):
    """
        Creates cluster files for the selected city to be used for street based pooling
    
        input:
            city: city for which the cluster files should be created
            storage: mask storage folder
            k: kernel size of the cluster
    """

    #Load mask of the city (needed in case the OSM data changed since it was created)
    mainMask = pickle.load(open(os.path.join(storage, city + '.mask'), 'rb'))

    # Coordinates of the bounding boxes
    if city == 'Berlin':
        yMin = 52.359
        yMax = 52.854
        xMin = 13.189
        xMax = 13.625
    elif city == 'Moscow':
        yMin = 55.506
        yMax = 55.942
        xMin = 37.357
        xMax = 37.852
    elif city == 'Istanbul':
        yMin = 40.810
        yMax = 41.305
        xMin = 28.794
        xMax = 29.230
    else:
        raise ValueError('Unknown city: ' + city)

    # Load street data
    G = ox.graph_from_bbox(yMax,
                           yMin,
                           xMax,
                           xMin,
                           network_type='drive',
                           truncate_by_edge=True)

    print('Graph loaded for ' + city)

    # Convert street data to GeoDataFrame
    nodes, edges = ox.graph_to_gdfs(G)

    # Size of the grid cells
    length = 0.001

    # Bounding boxes of the grid cells
    cols = np.arange(xMin, xMax, length)
    rows = np.arange(yMin, yMax, length)

    polygons = []
    xList = []
    yList = []

    # Build grid cells representing the pixels of the traffic4cast images
    for x in cols:
        for y in rows:
            polygons.append(
                Polygon([(x, y), (x + length, y), (x + length, y + length),
                         (x, y + length)]))
            xList.append(int(np.round(1000 * (x - xMin))))
            yList.append(int(np.round(1000 * (y - yMin))))

    grid = gpd.GeoDataFrame({'geometry': polygons, 'x': xList, 'y': yList})

    edges.crs = grid.crs

    # intersect road data with grid
    joined = gpd.sjoin(edges, grid, op='intersects')

    print('Intersections built for ' + city)

    mask = np.zeros((495, 436))

    # Create different masks for different road categories
    if city == 'Moscow':

        split1 = joined[joined.highway == 'motorway']
        split2 = joined[joined.highway == 'trunk']
        split3 = joined[joined.highway == 'trunk_link']

        mask1 = mask.copy()
        for idx, row in split1.iterrows():
            mask1[row.x, row.y] = 1
        for idx, row in split2.iterrows():
            mask1[row.x, row.y] = 1
        for idx, row in split3.iterrows():
            mask1[row.x, row.y] = 1
        mask1 = np.flip(mask1)
        mask1[mainMask == 0] = 0

        split4 = joined[joined.highway == 'primary']
        split5 = joined[joined.highway == 'primary_link']  # OSM tags use '_'

        mask2 = mask.copy()

        for idx, row in split4.iterrows():
            mask2[row.x, row.y] = 1
        for idx, row in split5.iterrows():
            mask2[row.x, row.y] = 1
        mask2 = np.flip(mask2)

        mask2 = mask2 - mask1
        mask2[mask2 < 0] = 0
        mask2[mainMask == 0] = 0

        split6 = joined[joined.highway == 'secondary']
        split7 = joined[joined.highway == 'secondary_link']

        mask3 = mask.copy()

        for idx, row in split6.iterrows():
            mask3[row.x, row.y] = 1
        for idx, row in split7.iterrows():
            mask3[row.x, row.y] = 1
        mask3 = np.flip(mask3)

        mask3 = mask3 - mask2 - mask1
        mask3[mask3 < 0] = 0
        mask3[mainMask == 0] = 0

        split8 = joined[joined.highway == 'tertiary']
        split9 = joined[joined.highway == 'tertiary_link']

        mask4 = mask.copy()

        for idx, row in split8.iterrows():
            mask4[row.x, row.y] = 1
        for idx, row in split9.iterrows():
            mask4[row.x, row.y] = 1
        mask4 = np.flip(mask4)

        mask4 = mask4 - mask3 - mask2 - mask1
        mask4[mask4 < 0] = 0
        mask4[mainMask == 0] = 0

        mask5 = mainMask - mask4 - mask3 - mask2 - mask1
        mask5[mask5 < 0] = 0

    else:
        split1 = joined[joined.highway == 'motorway']
        split2 = joined[joined.highway == 'motorway_link']

        mask = np.zeros((495, 436))
        mask1 = mask.copy()
        for idx, row in split1.iterrows():
            mask1[row.y, row.x] = 1
        for idx, row in split2.iterrows():
            mask1[row.y, row.x] = 1
        mask1 = np.flip(mask1, 0)
        mask1[mainMask == 0] = 0

        split3 = joined[joined.highway == 'primary']
        split4 = joined[joined.highway == 'primary_link']  # OSM tags use '_'

        mask2 = mask.copy()

        for idx, row in split3.iterrows():
            mask2[row.y, row.x] = 1
        for idx, row in split4.iterrows():
            mask2[row.y, row.x] = 1
        mask2 = np.flip(mask2, 0)

        mask2 = mask2 - mask1
        mask2[mask2 < 0] = 0
        mask2[mainMask == 0] = 0

        split5 = joined[joined.highway == 'secondary']
        split6 = joined[joined.highway == 'secondary_link']

        mask3 = mask.copy()

        for idx, row in split5.iterrows():
            mask3[row.y, row.x] = 1
        for idx, row in split6.iterrows():
            mask3[row.y, row.x] = 1
        mask3 = np.flip(mask3, 0)

        mask3 = mask3 - mask2 - mask1
        mask3[mask3 < 0] = 0
        mask3[mainMask == 0] = 0

        split7 = joined[joined.highway == 'tertiary']
        split8 = joined[joined.highway == 'tertiary_link']

        mask4 = mask.copy()

        for idx, row in split7.iterrows():
            mask4[row.y, row.x] = 1
        for idx, row in split8.iterrows():
            mask4[row.y, row.x] = 1
        mask4 = np.flip(mask4, 0)

        mask4 = mask4 - mask3 - mask2 - mask1
        mask4[mask4 < 0] = 0
        mask4[mainMask == 0] = 0

        mask5 = mainMask - mask4 - mask3 - mask2 - mask1
        mask5[mask5 < 0] = 0

    maskMix = mask.copy()

    # Put different categories together
    maskMix[mask1.astype(int) == 1] = 1
    maskMix[mask2.astype(int) == 1] = 2
    maskMix[mask3.astype(int) == 1] = 3
    maskMix[mask4.astype(int) == 1] = 4
    maskMix[mask5.astype(int) == 1] = 5

    clusterNr = 0
    clusters = {}

    # Cluster points in the same category and kernel together
    for i in range(1, 6):
        maskpos = np.where(maskMix == i)
        arr = np.asarray(maskpos)
        maskCluster = arr // k
        clusters[i] = clusterNr + maskCluster[0] * int(np.ceil(
            435 / k)) + maskCluster[1]
        clusterNr = max(clusters[i])

    maskpos = np.where(maskMix == 1)

    maskCluster = mask.copy()
    for i in range(1, 6):
        maskpos = np.where(maskMix == i)
        maskCluster[maskpos] = clusters[i]

    # Calculate cluster centers
    j = 0
    clusterCenters = np.zeros((len(np.unique(maskCluster)) - 1, 2))
    for i in np.unique(maskCluster):
        if i != 0:
            pos = np.where(maskCluster == i)
            x = pos[0]
            y = pos[1]
            meanX = np.mean(x)
            meanY = np.mean(y)
            clusterCenters[j][0] = meanX
            clusterCenters[j][1] = meanY

            j += 1

    # Get cluster and category information
    index = np.where(maskMix > 0)
    cats = maskMix[index].astype(int)
    clusters = maskCluster[index].astype(int)

    # Save cluster, category and cluster center files
    pickle.dump(clusterCenters,
                open(os.path.join(storage, city + str(k) + '.centers'), 'wb'))
    pickle.dump(cats, open(os.path.join(storage, city + str(k) + '.cats'),
                           'wb'))
    pickle.dump(clusters,
                open(os.path.join(storage, city + str(k) + '.clusters'), 'wb'))
    print('Process done for ' + city)
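
# Example call; requires the mask created by getMaskFromOSM above to be
# present in the same storage folder:
getStreetClusters(city='Berlin', storage='masks', k=4)
# writes Berlin4.centers, Berlin4.cats and Berlin4.clusters into `masks`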
Esempio n. 49
0
 def __gt__(self, other):
     return Polygon(self.o.exterior).contains(other.o)
Esempio n. 50
0
def tessellation(buildings,
                 unique_id='uID',
                 cut_buffer=50,
                 queen_corners=False,
                 minimum=2):
    """
    Generate morphological tessellation around given buildings.

    Parameters
    ----------
    buildings : GeoDataFrame
        GeoDataFrame containing building footprints
    unique_id : str
        name of the column with unique id. If there is none, it could be generated by unique_id().
    cut_buffer : float
        buffer around buildings limiting the extent of tessellation
    queen_corners : bool
        if True, snap nearly-coincident cell corners to a shared point
    minimum : float
        distance threshold below which nearby corners are merged when
        queen_corners is True

    Returns
    -------
    GeoDataFrame
        GeoDataFrame of morphological tessellation with the unique id based on original buildings.

    Notes
    -------
    Fix saw-like geometry.
    """
    # reprojected_crs = buildings.crs.copy()
    #
    # reprojected_crs['x_0'] = 0
    # reprojected_crs['y_0'] = 0

    tqdm.pandas()
    # move dataframe close to 0 to eliminate imprecision of Qhull
    bounds = buildings['geometry'].bounds
    centre_x = -(bounds['maxx'].max() + bounds['minx'].min()) / 2
    centre_y = -(bounds['maxy'].max() + bounds['miny'].min()) / 2
    objects = buildings.copy()
    objects['geometry'] = objects['geometry'].translate(xoff=centre_x,
                                                        yoff=centre_y)

    # buffer geometry to resolve shared walls
    print('Buffering geometry...')
    objects['geometry'] = objects.geometry.apply(
        lambda g: g.buffer(-0.5, cap_style=2, join_style=2))

    # simplify geometry before Voronoi
    print('Simplifying geometry...')
    objects['geometry'] = objects.simplify(0.25, preserve_topology=True)
    obj_simple = objects.copy()

    print('Preparing buffer zone for edge resolving (buffering)...')
    obj_simple['geometry'] = obj_simple.buffer(cut_buffer)

    print('Preparing buffer zone for edge resolving (dissolving)...')
    obj_simple['diss'] = 0
    built_up_df = obj_simple.dissolve(by='diss')
    built_up = built_up_df.geometry[0]

    # resolve multipart polygons, singlepart are needed for densification
    print('Converting multipart geometry to singlepart...')
    objects = multi2single(objects)

    # densify geometry before Voronoi tesselation
    def _densify(geom):
        poly = geom
        wkt = geom.wkt  # shapely Polygon to wkt
        geom = ogr.CreateGeometryFromWkt(wkt)  # create ogr geometry
        geom.Segmentize(2)  # densify geometry by 2 metres
        geom.CloseRings()  # fix for GDAL 2.4.1 bug
        wkt2 = geom.ExportToWkt()  # ogr geometry to wkt
        try:
            new = loads(wkt2)  # wkt to shapely Polygon
            return new
        except Exception:
            # fall back to the original geometry if the WKT round-trip fails
            return poly

    print('Densifying geometry...')
    objects['geometry'] = objects['geometry'].progress_map(_densify)

    print('Generating input point array...')
    points = []
    ids = []
    for idx, row in tqdm(objects.iterrows(), total=objects.shape[0]):
        poly_ext = row['geometry'].boundary
        if poly_ext is not None:
            if poly_ext.type == 'MultiLineString':
                for line in poly_ext:
                    point_coords = line.coords
                    row_array = np.array(point_coords).tolist()
                    for i in range(len(row_array)):
                        points.append(row_array[i])
                        ids.append(row[unique_id])
            elif poly_ext.type == 'LineString':
                point_coords = poly_ext.coords
                row_array = np.array(point_coords).tolist()
                for i in range(len(row_array)):
                    points.append(row_array[i])
                    ids.append(row[unique_id])
            else:
                raise Exception('Boundary type is {}'.format(poly_ext.type))

    # add convex hull buffered by cut distance to eliminate infinity issues
    print('Generating convex hull...')
    hull = built_up.convex_hull.buffer(cut_buffer)
    hull_array = np.array(hull.boundary.coords).tolist()
    for i in range(len(hull_array)):
        points.append(hull_array[i])
        ids.append(-1)

    voronoi_points = np.array(points)  # array of points as an input of Voronoi

    print('Generating Voronoi diagram...')
    voronoi_diagram = Voronoi(voronoi_points)

    print('Generating GeoDataFrame...')
    # generate DataFrame of results
    regions = pd.DataFrame()
    regions[unique_id] = ids  # add unique id
    regions['region'] = voronoi_diagram.point_region  # add region id for each point

    # add vertices of each polygon
    vertices = []
    for region in regions.region:
        vertices.append(voronoi_diagram.regions[region])
    regions['vertices'] = vertices

    # convert vertices to Polygons
    polygons = []
    for region in tqdm(regions.vertices, desc='Vertices to Polygons'):
        if -1 not in region:
            polygons.append(Polygon(voronoi_diagram.vertices[region]))
        else:
            polygons.append(None)
    # save polygons as geometry column
    regions['geometry'] = polygons

    # generate GeoDataFrame
    regions_gdf = gpd.GeoDataFrame(regions.dropna(), geometry='geometry')
    regions_gdf = regions_gdf.loc[regions_gdf['geometry'].length <
                                  1000000]  # delete errors
    regions_gdf = regions_gdf.loc[regions_gdf[unique_id] !=
                                  -1]  # delete hull-based cells
    regions_gdf.crs = buildings.crs

    print('Dissolving Voronoi polygons...')
    morphological_tessellation = regions_gdf[[unique_id, 'geometry'
                                              ]].dissolve(by=unique_id,
                                                          as_index=False)

    # cut infinity of voronoi by set buffer (thanks to Geoff Boeing for the script)
    print('Preparing buffer zone for edge resolving (quadrat cut)...')
    geometry = built_up.boundary
    geometry_cut = ox.quadrat_cut_geometry(geometry, quadrat_width=100)

    print('Building R-tree...')
    sindex = morphological_tessellation.sindex
    # find the points that intersect with each subpolygon and add them to points_within_geometry
    to_cut = pd.DataFrame()
    for poly in geometry_cut:
        # find approximate matches with r-tree, then precise matches from those approximate ones
        possible_matches_index = list(sindex.intersection(poly.bounds))
        possible_matches = morphological_tessellation.iloc[
            possible_matches_index]
        precise_matches = possible_matches[possible_matches.intersects(poly)]
        to_cut = to_cut.append(precise_matches)

    # delete duplicates
    to_cut = to_cut.drop_duplicates(subset=[unique_id])
    subselection = list(to_cut.index)

    print('Cutting...')
    for idx, row in tqdm(
            morphological_tessellation.loc[subselection].iterrows(),
            total=morphological_tessellation.loc[subselection].shape[0]):
        intersection = row.geometry.intersection(built_up)
        if intersection.type == 'MultiPolygon':
            areas = {}
            for p in range(len(intersection)):
                area = intersection[p].area
                areas[p] = area
            maximal = max(areas.items(), key=operator.itemgetter(1))[0]
            morphological_tessellation.loc[idx,
                                           'geometry'] = intersection[maximal]
        elif intersection.type == 'GeometryCollection':
            for geom in list(intersection.geoms):
                if geom.type != 'Polygon':
                    pass
                else:
                    morphological_tessellation.loc[idx, 'geometry'] = geom
        else:
            morphological_tessellation.loc[idx, 'geometry'] = intersection

    # check against input layer
    ids_original = list(buildings[unique_id])
    ids_generated = list(morphological_tessellation[unique_id])
    if len(ids_original) != len(ids_generated):
        import warnings
        diff = set(ids_original).difference(ids_generated)
        warnings.warn(
            "Tessellation does not fully match buildings. {len} element(s) collapsed "
            "during generation - unique_id: {i}".format(len=len(diff), i=diff))

    # check MultiPolygons - usually caused by error in input geometry
    uids = morphological_tessellation[morphological_tessellation.geometry.type
                                      == 'MultiPolygon'][unique_id]
    if len(uids) > 0:
        import warnings
        warnings.warn(
            'Tessellation contains MultiPolygon elements. Initial objects should be edited. '
            'unique_id of affected elements: {}'.format(list(uids)))

    # resolve imprecise corners
    if queen_corners is True:
        print('Generating queen corners...')
        print(' Generating spatial index...')
        changes = {}
        qid = 0
        # detect points which should be changed and calculate new coordinates
        print(' Detecting points of change...')
        for ix, row in tqdm(morphological_tessellation.iterrows(),
                            total=morphological_tessellation.shape[0]):
            corners = []
            change = []

            cell = row.geometry
            coords = cell.exterior.coords
            for i in coords:
                point = Point(i)
                possible_matches_index = list(sindex.intersection(
                    point.bounds))
                possible_matches = morphological_tessellation.iloc[
                    possible_matches_index]
                precise_matches = sum(possible_matches.intersects(point))
                if precise_matches > 2:
                    corners.append(point)

            if len(corners) > 2:
                for c in range(len(corners)):
                    next_c = c + 1
                    if c == (len(corners) - 1):
                        next_c = 0
                    if corners[c].distance(corners[next_c]) < minimum:
                        change.append([corners[c], corners[next_c]])
            elif len(corners) == 2:
                if corners[0].distance(corners[1]) > 0:
                    if corners[0].distance(corners[1]) < minimum:
                        change.append([corners[0], corners[1]])

            if change:
                for points in change:
                    x_new = np.mean([points[0].x, points[1].x])
                    y_new = np.mean([points[0].y, points[1].y])
                    new = [(x_new, y_new), qid]  # fixed: the builtin id() was used by mistake
                    changes[(points[0].x, points[0].y)] = new
                    changes[(points[1].x, points[1].y)] = new
                    qid = qid + 1

        print(' Generating new geometry...')
        for ix, row in tqdm(morphological_tessellation.iterrows(),
                            total=morphological_tessellation.shape[0]):
            cell = row.geometry
            coords = list(cell.exterior.coords)

            moves = {}
            for x in coords:
                if x in changes.keys():
                    moves[coords.index(x)] = changes[x]
            keys = list(moves.keys())
            delete_points = []
            for move in range(len(keys)):
                if move < len(keys) - 1:
                    if moves[keys[move]][1] == moves[keys[
                            move + 1]][1] and keys[move + 1] - keys[move] < 5:
                        delete_points = delete_points + (
                            coords[keys[move]:keys[move + 1]])
                        # TODO: base this condition on distance rather than point count

            newcoords = [
                changes[x][0] if x in changes.keys() else x for x in coords
            ]
            # filter instead of remove() so items are not skipped mid-iteration
            newcoords = [c for c in newcoords if c not in delete_points]
            if coords != newcoords:
                if not cell.interiors:
                    # newgeom = Polygon(newcoords).buffer(0)
                    be = Polygon(newcoords).exterior
                    mls = be.intersection(be)
                    if len(list(shapely.ops.polygonize(mls))) > 1:
                        newgeom = MultiPolygon(shapely.ops.polygonize(mls))
                        geoms = []
                        for g in range(len(newgeom)):
                            geoms.append(newgeom[g].area)
                        newgeom = newgeom[geoms.index(max(geoms))]
                    else:
                        newgeom = list(shapely.ops.polygonize(mls))[0]
                else:
                    newgeom = Polygon(newcoords, holes=cell.interiors)
                morphological_tessellation.loc[ix, 'geometry'] = newgeom

        # check against input layer
        ids_original = list(buildings[unique_id])
        ids_generated = list(morphological_tessellation[unique_id])
        if len(ids_original) != len(ids_generated):
            import warnings
            diff = set(ids_original).difference(ids_generated)
            warnings.warn(
                "Tessellation does not fully match buildings. {len} element(s) collapsed "
                "during generation - unique_id: {i}".format(len=len(diff),
                                                            i=diff))

        # check MultiPolygons - usually caused by error in input geometry
        uids = morphological_tessellation[morphological_tessellation.geometry.
                                          type == 'MultiPolygon'][unique_id]
        if len(uids) > 0:
            import warnings
            warnings.warn(
                'Tessellation contains MultiPolygon elements. Initial objects should be edited. '
                'unique_id of affected elements: {}'.format(list(uids)))

    # translate back to true position
    morphological_tessellation['geometry'] = morphological_tessellation[
        'geometry'].translate(xoff=-centre_x, yoff=-centre_y)
    print('Tessellation finished.')
    return morphological_tessellation
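
# Hypothetical usage; a projected (metric) CRS is assumed, since the buffer
# and simplification distances above are in metres:
tess = tessellation(buildings, unique_id='uID', cut_buffer=50)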
Esempio n. 51
0
    def _update(self,
                pose: Pose,
                from_timestamp: float,
                radius,
                hypothesis=False,
                pVx=param._HYPOPEDES_VX,
                pCovLat=param._HYPOPEDES_COV_LAT,
                pCovLon=param._HYPOPEDES_COV_LON,
                pCrossRate=param._PEDES_APPEAR_RATE_CROSS,
                pStreetRate=param._PEDES_APPEAR_RATE_STREET,
                pOtherRate=param._PEDES_APPEAR_RATE_OTHER,
                pDistanceThres=param._PEDES_OTHER_MIN_THRESHOLD,
                vVx=param._HYPOVEH_VX,
                vCovLat=param._HYPOVEH_COV_LAT,
                vCovLon=param._HYPOVEH_COV_LON,
                vRate=param._APPEAR_RATE_VEH):
        """
        Get environment around a given position
        """
        self._l_hypoPedes = []
        self._l_hypoVehicle = []
        l_fov = {}
        l_vehicle = []
        l_staticVehicle = []
        l_object = []
        l_pedestrian = []
        l_polys = []
        currentPos = np.array([pose.x_m, pose.y_m])
        searchRadius = radius + 1
        # find static object
        for sObj in self._l_staticObject:
            objPoly = sObj._poly
            d2ego = np.linalg.norm(objPoly - currentPos, axis=1)
            if (d2ego < radius).any():
                l_object.append(sObj)
                l_polys.append(objPoly)

        # find vehicle poly
        for veh in self._l_vehicle:
            vehPose = veh.getCurrentPose()
            if vehPose.timestamp_s == from_timestamp:
                vehPos = np.array([vehPose.x_m, vehPose.y_m])
                if np.linalg.norm(vehPos - currentPos) < searchRadius:
                    vehPoly = veh.getCurrentPoly()
                    l_polys.append(vehPoly)

        # generate field of view
        self._fov, self._fovRange = pfnc.FOV(pose=pose,
                                             polys=l_polys,
                                             angle=param._FOV_ANGLE,
                                             radius=radius,
                                             nrRays=param._FOV_RAYS)
        fov_poly = Polygon(self._fov)

        # generate hypothesis from static object
        for sObj in l_object:
            objPoly = sObj._poly
            if pfnc.inPolyPointList(objPoly, fov_poly) and hypothesis:
                l_fov_d, hasHypo = self._generateHypothesis(
                    pose, objPoly, fov_poly, radius, pVx, pCovLat, pCovLon,
                    pCrossRate, pStreetRate, pOtherRate, pDistanceThres, vVx,
                    vCovLat, vCovLon, vRate)
                if hasHypo:
                    l_fov.update({sObj._idx: l_fov_d})

        # check other moving vehicle in FOV
        for veh in self._l_vehicle:
            vehPose = veh.getCurrentPose()
            if vehPose.timestamp_s == from_timestamp:
                vehPos = np.array([vehPose.x_m, vehPose.y_m])
                # if np.linalg.norm(vehPos - currentPos) < searchRadius:
                vehPoly = veh.getCurrentPoly()
                if pfnc.inPolyPointList(vehPoly, fov_poly):
                    veh.setDetected(True)
                    veh.setDetectedTime()
                    # generate hypothesis with low speed vehicle
                    if hypothesis and vehPose.vdy.vx_ms < 3:
                        l_fov_d, hasHypo = self._generateHypothesis(
                            pose,
                            vehPoly,
                            fov_poly,
                            radius,
                            pVx,
                            pCovLat,
                            pCovLon,
                            pCrossRate,
                            pStreetRate,
                            pOtherRate,
                            pDistanceThres,
                            vVx,
                            vCovLat,
                            vCovLon,
                            vRate,
                            objectVehicle=True)
                        if hasHypo:
                            l_fov.update({veh._idx: l_fov_d})
                    if vehPose.vdy.vx_ms > 0.5:
                        l_vehicle.append(veh)
                    else:
                        l_staticVehicle.append(veh)
                    continue
                else:
                    veh.setDetected(False)
                    continue
            # else:
            #     veh.setDetected(False)

        # check pedestrian in FOV
        for pedes in self._l_pedestrian:
            pedesPose = pedes.getCurrentPose()
            if pedesPose.timestamp_s == from_timestamp:
                pedesPos = np.array([pedesPose.x_m, pedesPose.y_m])
                if np.linalg.norm(pedesPos - currentPos) < searchRadius:
                    # if pfnc.inPolygonPoint(pedesPos, fov):
                    if pfnc.inPolyPoint(pedesPos, fov_poly):
                        pedes.setDetected(True)
                        pedes.setDetectedTime()
                        if pedesPose.vdy.vx_ms > 0:
                            l_pedestrian.append(pedes)
                        continue
                    else:
                        pedes.setDetected(False)
                        continue
            pedes.setDetected(False)

        self._l_update = {
            'vehicle': l_vehicle,
            'staticObject': l_object,
            'pedestrian': l_pedestrian,
            'hypoPedestrian': self._l_hypoPedes,
            'hypoVehicle': self._l_hypoVehicle,
            'staticVehicle': l_staticVehicle,
            'fovDistance': l_fov
        }
        del l_fov, l_vehicle, l_staticVehicle, l_pedestrian, l_object, l_polys, currentPos, searchRadius
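
The pfnc point-in-polygon helpers called above (inPolyPoint, inPolyPointList) are not defined in this snippet; a minimal shapely-based sketch of what they might look like, assuming object polygons are given as lists of (x, y) corners:

# Hypothetical reconstruction of the pfnc helpers used above.
from shapely.geometry import Point

def inPolyPoint(pos, fov_poly):
    # pos is an (x, y) position; True if it lies inside the FOV polygon
    return fov_poly.contains(Point(pos[0], pos[1]))

def inPolyPointList(objPoly, fov_poly):
    # objPoly is an iterable of (x, y) corners; True if any corner is in the FOV
    return any(fov_poly.contains(Point(p)) for p in objPoly)
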
Esempio n. 52
0
import geopandas as gpd
from shapely.geometry import Polygon, Point
import matplotlib.pyplot as plt

lon_point_list = [447123, 448230, 448230, 447123]
lat_point_list = [4948000, 4948000, 4941720, 4941720]

lon_point_list2 = [5, 3, 2]
lat_point_list2 = [1, 3, 1]

polygon_geom = Polygon(zip(lon_point_list, lat_point_list))
polygon_geom2 = Polygon(zip(lon_point_list2, lat_point_list2))

p1 = Point(445000, 4937500).buffer(1)
p2 = Point(460000, 4952500).buffer(1)

print(p1.within(polygon_geom))
print(p2.within(polygon_geom))
print(p1.distance(p2))
print(polygon_geom.contains(p1))
print(polygon_geom.contains(p2))
print(polygon_geom.within(p1))
print(polygon_geom.within(p2))
if polygon_geom2.within(p1):
    print(p1)
if polygon_geom2.within(p2):
    print(p2)

crs = 'EPSG:4326'  # modern CRS string; the older {'init': 'epsg:4326'} dict form is deprecated
polygon = gpd.GeoDataFrame(index=[0, 1, 3],
                           crs=crs,
                           geometry=[polygon_geom, polygon_geom2, p1])  # hypothetical geometry list, one per index entry
Esempio n. 53
0
    def initPolygonObject(self):
        coords = [(self.topLeft['x'], self.topLeft['y']),
                  (self.topRight['x'], self.topRight['y']),
                  (self.bottomRight['x'], self.bottomRight['y']),
                  (self.bottomLeft['x'], self.bottomLeft['y'])]
        self.poly = Polygon(coords)  # shapely Polygon object
Esempio n. 54
0
class Contains(object):
    # sort key wrapping a geometry so sorted() orders by containment
    # (header and __init__ assumed, mirroring ContainsExteriorPolygon below)
    def __init__(self, o):
        self.o = o

    def __lt__(self, other):
        return self.o.contains(other.o)

if 0:
    print("now showing sorted")
    for p in sorted(polygonize(mls4), key=Contains):
        plot_polys([p])
        plt.show()

polys = list(polygonize(mls4))
print("0 contains 1? {}".format(polys[0].contains(polys[1])))
print("1 contains 0? {}".format(polys[1].contains(polys[0])))

print("0 exterior contains 1 exterior? {}".format(polys[0].exterior.contains(polys[1].exterior)))

print("0 exterior polygon contains 1 exterior? {}".format(Polygon(polys[0].exterior).contains(polys[1].exterior)))



class ContainsExteriorPolygon(object):
    def __init__(self, o):
        self.o = o
    def __gt__(self, other):
        return Polygon(self.o.exterior).contains(other.o)

if 0:
    for p in sorted(polygonize(mls4), key=ContainsExteriorPolygon, reverse=True):
        plot_polys([p])
        plt.show()
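
plot_polys is likewise never defined in this example; a plausible matplotlib sketch (hypothetical helper):

import matplotlib.pyplot as plt

def plot_polys(polys):
    # draw each shapely polygon's exterior ring
    for poly in polys:
        xs, ys = poly.exterior.xy
        plt.plot(xs, ys)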

Esempio n. 55
0
# """This file contains the SQLAlchemy ORM models"""

from sqlalchemy.orm import synonym
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
from sqlalchemy.schema import Sequence
from geoalchemy2.types import Geometry
from geoalchemy2.shape import from_shape, to_shape
import random
from datetime import datetime
from maproulette import app, db
from shapely.geometry import Polygon, Point, MultiPoint
import pytz

random.seed()

world_polygon = Polygon([(-180, -90), (-180, 90), (180, 90), (180, -90),
                         (-180, -90)])


def getrandom():
    return random.random()


class User(db.Model):
    """A MapRoulette User"""

    __tablename__ = 'users'

    id = db.Column(db.Integer, unique=True, primary_key=True, nullable=False)
    oauth_token = db.Column(db.String)
    oauth_secret = db.Column(db.String)
    display_name = db.Column(db.String, nullable=False)
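
As a usage note, the from_shape/to_shape helpers imported above convert between shapely geometries and Geometry column values; a minimal round trip with world_polygon (SRID value assumed):

# shapely -> WKB element suitable for a Geometry column, and back
wkb_element = from_shape(world_polygon, srid=4326)
assert to_shape(wkb_element).equals(world_polygon)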
Esempio n. 56
0
def Digitize(file_name,stepover):
    
    #x,y = boundary(raw_input("Enter file\n"))
    #x,y = boundary("b (2).txt")
    x,y = boundary(file_name)
    #stepover = input("Enter the stepover distance\n")
    stepover = float(stepover)
    
    if len(x) == len(y):
        print("Co-ordinates Loaded :) ")
        #print(len(x))

    points = []
    for i in range(len(x)):
        points.append([x[i],y[i]])
    orig_poly=Polygon(points)
    area_orig = orig_poly.area
    perimeter_orig = orig_poly.length
    print("Points List has been made\n")
    #area=PolyArea(x,y)
    #print(area)
    print("Area of Boundary = " + str(area_orig))
    print("Perimeter of Boundary = " + str(perimeter_orig))
    
    #p = Point(35,-120)
    #d= p.distance(orig_poly)  
    #print(d) 

    plt.plot(x,y)

    axes = plt.gca()
    plt.axis('scaled')
    plt.savefig('figure.png')
    plt.close()
    #plt.show()

    xmin = min(x)-stepover
    xmax = max(x)+stepover
    ymin = min(y)-stepover
    ymax = max(y)+stepover

    print("Minimum X: " + str(xmin))
    print("Maximum X: " + str(xmax))
    print("Minimum Y: " + str(ymin))
    print("Maximum X: " + str(ymax))


    y_plot_min,y_plot_max = axes.get_ylim()
    x_plot_min,x_plot_max = axes.get_xlim()

    x_init = x_plot_min
    y_init = y_plot_min
    x_end = x_plot_max
    y_end = y_plot_max

    print("Plot Min Lim: " + str(x_init) + " , " + str(y_init))
    print("Plot Max Lim: " + str(x_end) + " , " + str(y_end))
    print("\n\nDigitizing.....")

    all_points = []

    for i in drange(float(xmin), float(xmax), stepover):
        for j in drange(float(ymin), float(ymax), stepover):
            all_points.append([i,j])
            #plt.scatter(i,j,c='b',s=2)
    
    #plt.axis('scaled')
    #plt.show()
    #print(points)
    inside_points = []
    p = mplPath.Path(points)
    for point in all_points:
        inside = p.contains_point(point)
        d = Point(point).distance(orig_poly)
        # keep grid points inside the boundary or within one stepover of it
        if inside or d <= stepover:
            inside_points.append([point[0], point[1]])
            plt.scatter(point[0], point[1], c='b', s=2)
            #print(point[0],point[1])
    
    #print(inside_points)
    plt.axis('scaled')
    plt.savefig('figure' + str(int(stepover)) + '.png')
    plt.close()
    #plt.show()
    print("Digitization complete\n")
    print("running TSP")
    #print(inside_points[0][0])
    f= open("pic4.PTS","w")
    f.write("# x-coord y-coord radius\n")
    i=0
    for point in inside_points:
        #print(point)
        f.write(str(point[0])+" "+str(point[1])+"\n")
    f.close()
    #print(inside_points)
    
    import tspart
    tspart.TSP('','')
    print("plotting result")
    import plot_points
    sorted_x,sorted_y = plot_points.tspPlot('pic4.PTS','optimal_path')
    
    plt.plot(sorted_x,sorted_y)
    #plt.plot(x,y)
    plt.axis('scaled')
    plt.savefig('figure' + str(int(stepover)) + '_' + str(int(stepover))+ '.png')
    plt.close()
    #plt.show()
    
    sorted_points = []
    for i in range(len(sorted_x)):
        sorted_points.append([sorted_x[i],sorted_y[i]])
    final_poly=Polygon(sorted_points)
    area_final = final_poly.area
    perimeter_final = final_poly.length
    print(area_final)
    print(perimeter_final)
    
    efficiency = area_orig / (perimeter_final * stepover)

    return efficiency
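
drange is not defined in this snippet; a minimal float-step range generator, assuming it mirrors the builtin range semantics:

def drange(start, stop, step):
    # float-step analogue of range(): yields start, start + step, ... while < stop
    value = float(start)
    while value < stop:
        yield value
        value += step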
Esempio n. 57
0
def get_cw_polys(polys):
    return [
        poly[::-1] if Polygon(poly).exterior.is_ccw else poly for poly in polys
    ]
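
A quick check of the winding-order normalization, with polygons given as coordinate lists:

tri = [(0, 0), (1, 0), (0, 1)]      # counter-clockwise ring
print(get_cw_polys([tri]))          # [[(0, 1), (1, 0), (0, 0)]] -- reversed to clockwise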
Esempio n. 58
0
    def __init__(self,
                 id,
                 nbuff,
                 movers=None,
                 reactors=None,
                 stickers=None,
                 diffusers=None,
                 tstart=None,
                 tend=None,
                 tstep=0.,
                 tstep_release=0.,
                 outfile=None,
                 P0=[0, 0, 0],
                 spawn=1,
                 reln=0,
                 R0=1.,
                 Q0=1.,
                 unstick=0.,
                 **prop):
        self.id = id
        self.np = 0
        self.ninc = 1  #Counter for unique numbering
        self.tstep = prop.get('tstep', 0.)
        self.tstep_release = tstep_release
        self.npmax = nbuff
        self.tstart = parsetime(tstart) if tstart else 0.
        self.tend = parsetime(tend) if tend else 1.e10
        self.state = numpy.zeros((nbuff + 1), 'i')  #State of particle
        self.age = numpy.zeros((nbuff + 1))
        self.mass = numpy.ones((nbuff + 1))  # mass is used for particle weighting; could also represent volume or counts
        self.nid = numpy.zeros((nbuff + 1))  #Array for unique numbering
        self.props = copy.copy(self.default_props)
        self.props['P0'] = P0
        self.props['spawn'] = spawn
        self.props.update(prop)
        # Release Options
        #
        # Initialization of particle position vector
        if numpy.size(P0[2]) == 1:
            # P0:single position x,y,z
            # pre-allocate with a single release position
            self.pos = numpy.array(P0) * numpy.ones((nbuff + 1, 3))
            self.post = numpy.array(P0) * numpy.ones((nbuff + 1, 3))
        elif numpy.size(P0[2]) == 2:
            # P0:single x,y position but range of vertical z level
            # random position allocated within that range
            # Initialize variables
            P01 = [P0[0], P0[1], P0[2][0]]
            self.pos = numpy.array(P01) * numpy.ones((nbuff + 1, 3))
            self.post = numpy.array(P01) * numpy.ones((nbuff + 1, 3))
            #thickness of z layer
            dz = abs(P0[2][0] - P0[2][1])
            # depths are supposed to be negative
            zz = numpy.random.random(nbuff + 1)
            self.pos[:, 2] = min(P0[2]) + dz * zz
            self.post[:, 2] = min(P0[2]) + dz * zz

        if "circular_radius" in self.props:
            #release in a circle rather than at a single X,Y point location
            #1 deg lat = 110 km, 1 deg lon= 111.32km*cos(lat)
            deg_in_m_lon = 111320.0 * numpy.cos(-numpy.pi * P0[1] / 180)
            deg_in_m_lat = 110574.0
            radius_lon = self.props['circular_radius'] / deg_in_m_lon
            radius_lat = self.props['circular_radius'] / deg_in_m_lat
            rand1 = numpy.random.random(nbuff + 1)
            rand2 = numpy.random.random(nbuff + 1)
            self.pos[:, 0] = self.pos[:, 0] + radius_lon * rand1 * numpy.ones(
                (1, nbuff + 1)) * numpy.cos(rand2 * 2 * numpy.pi)
            self.pos[:, 1] = self.pos[:, 1] + radius_lat * rand1 * numpy.ones(
                (1, nbuff + 1)) * numpy.sin(rand2 * 2 * numpy.pi)
            self.post[:, 0] = self.pos[:, 0]
            self.post[:, 1] = self.pos[:, 1]
            # Note this will not make a perfect circle, but approximation is likely good enough.

        if "polygon" in self.props:
            # release in a polygon shape
            poly = Polygon(self.props['polygon'])
            point_in_poly = get_random_point_in_polygon(nbuff, poly)
            self.pos[:, 0] = point_in_poly[:, 0]
            self.pos[:, 1] = point_in_poly[:, 1]
            self.post[:, 0] = self.pos[:, 0]
            self.post[:, 1] = self.pos[:, 1]

        #Could add same code for range of X,Y ?
        # e.g. if numpy.size(P0[0])==2 & numpy.size(P0[1])==2
        # then release along a line [X1,Y1] - [X2,Y2]
        #
        # End of Release Options
        self.reln = reln  #Particles per release
        self.R = R0  #Total release of material
        self.Q = Q0  # Flux of material per day
        self.unstick = unstick  # Can become unstuck - number is halflife
        # default to fresh lists here to avoid the shared-mutable-default pitfall
        self.movers = movers if movers is not None else []
        self.reactors = reactors if reactors is not None else []
        self.stickers = stickers if stickers is not None else []
        self.diffusers = diffusers if diffusers is not None else []
        self.mfx = numpy.ones((nbuff + 1, 3))
        self.arrays = []
        self.children = {}
        self.outfile = outfile if outfile else 'ercore.' + self.id + '.out'
        self.relsumt = 0.
        if self.tstart != self.tend:
            self._npt = 1. * self.reln / abs(self.tend - self.tstart)
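
get_random_point_in_polygon is referenced above but not shown; a rejection-sampling sketch, assuming it returns one (x, y) row per particle slot:

import numpy
from shapely.geometry import Point

def get_random_point_in_polygon(nbuff, poly):
    # sample uniformly inside poly by rejection from its bounding box;
    # returns an (nbuff + 1, 2) array to match the particle buffers above
    minx, miny, maxx, maxy = poly.bounds
    points = numpy.empty((nbuff + 1, 2))
    n = 0
    while n <= nbuff:
        x = numpy.random.uniform(minx, maxx)
        y = numpy.random.uniform(miny, maxy)
        if poly.contains(Point(x, y)):
            points[n] = (x, y)
            n += 1
    return points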
Esempio n. 59
0
        print(f'processing {aoi} ({i + 1}/{len(aois)}) ...')

        json_paths = glob(
            os.path.join(args.train_dir, aoi, 'labels_match_pix', '*.geojson'))
        json_paths.sort()

        out_dir = os.path.join(args.out_dir, aoi)
        os.makedirs(out_dir, exist_ok=False)

        for json_path in tqdm(json_paths):
            df = gpd.read_file(json_path)

            # add `area` column (gpd.read_file already yields shapely geometries)
            df['area'] = df['geometry'].area

            # filter out small polygons
            mask = df['area'] >= args.min_area
            df = df[mask]

            # dump
            output_path = os.path.join(out_dir, os.path.basename(json_path))
            if len(df) > 0:
                df.to_file(output_path, driver='GeoJSON')
            else:
                print(f'warning: {output_path} is an empty geojson.')
                save_empty_geojson(output_path)
Esempio n. 60
0
def to_polygon(bbox):
    return Polygon([(bbox[2 * i], bbox[2 * i + 1]) for i in range(4)])
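
For illustration, assuming bbox is a flat 8-element sequence of corner coordinates (x0, y0, ..., x3, y3):

# axis-aligned unit square given as a flat corner list
print(to_polygon([0, 0, 1, 0, 1, 1, 0, 1]).area)  # 1.0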