def filter_points(points, shape):
    fun = lambda point: shape.contains(geo.Point(point))
    return list(filter(fun, points))
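# A minimal usage sketch (not from the original source), assuming `geo` is an
# alias for shapely.geometry; the containing shape here is a hypothetical
# unit-circle buffer.
from shapely import geometry as geo

circle = geo.Point(0.0, 0.0).buffer(1.0)
pts = [(0.1, 0.2), (2.0, 2.0), (-0.5, 0.3)]
inside = filter_points(pts, circle)  # -> [(0.1, 0.2), (-0.5, 0.3)]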
def polygon(self):
    return geometry.Point(0.0, 0.0).buffer(self.d / 2.0)
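# Standalone sketch of the same idea (an assumption, not the original class):
# buffering a point by half a diameter yields a polygonal approximation of a
# circle, so the area is only close to pi * (d/2)**2.
from shapely import geometry

d = 2.0  # hypothetical diameter
circle = geometry.Point(0.0, 0.0).buffer(d / 2.0)
print(round(circle.area, 3))  # close to pi for d = 2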
def findClosest(point, gridX, gridY):
    closeX = np.argmin(abs(point.x - gridX))
    closeY = np.argmin(abs(point.y - gridY))
    return geometry.Point(gridX[closeX], gridY[closeY])
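# Usage sketch (assumed context: numpy as np, shapely.geometry as geometry):
# snap an arbitrary point to the nearest node of a regular grid.
import numpy as np
from shapely import geometry

gridX = np.linspace(0.0, 10.0, 11)
gridY = np.linspace(0.0, 10.0, 11)
snapped = findClosest(geometry.Point(3.4, 7.8), gridX, gridY)
print(snapped)  # POINT (3 8)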
def test_emptying_point(self):
    p = sgeom.Point(0, 0)
    self.assertFalse(p._is_empty)
    p.empty()
    self.assertTrue(p._is_empty)
to_date = date.today() + timedelta(days=1)
DATE_TO = to_date.strftime("%Y-%m-%d")
# Look back 1 day to make sure we have everything
from_date = from_date - timedelta(days=1)
DATE_FROM = from_date.strftime("%Y-%m-%d")

results_object, code = get_file_list(DATE_FROM, DATE_TO)
if results_object is None:
    exit(code)

file_count = len(results_object['products'])
logging.info("Downloading %d files", file_count)

volcanos = numpy.asarray(config.VOLCANOS)
volc_points = [geometry.Point(x['longitude'], x['latitude']) for x in volcanos]

# setup import params
for idx, product in enumerate(results_object['products']):
    uuid = product['uuid']
    footprint = wkt.loads(product['wkt'])
    covered_volcs = [footprint.contains(x) for x in volc_points]
    covered_volcs = [x['name'] for x in volcanos[covered_volcs]]
    identifier = product['identifier']
    id_parts = [x for x in identifier.split('_') if x]
    filetime = parse(id_parts[4] + "z")
    filedate = filetime.strftime('%Y-%m-%d')
    file_dir = os.path.join(config.FILE_BASE, DEST_DIR, filedate)
    volc_dir = os.path.join(config.FILE_BASE, DEST_DIR)
def packCurves():
    if speedups.available:
        speedups.enable()
    t = time.time()
    packsettings = bpy.context.scene.cam_pack

    sheetsizex = packsettings.sheet_x
    sheetsizey = packsettings.sheet_y
    direction = packsettings.sheet_fill_direction
    distance = packsettings.distance
    rotate = packsettings.rotate

    polyfield = []  # position, rotation, and the actual poly will be stored here.
    for ob in bpy.context.selected_objects:
        allchunks = []
        simple.activate(ob)
        bpy.ops.object.make_single_user(type='SELECTED_OBJECTS')
        bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY')
        z = ob.location.z
        bpy.ops.object.location_clear()
        bpy.ops.object.rotation_clear()
        chunks = utils.curveToChunks(ob)
        npolys = utils.chunksToShapely(chunks)
        # add all polys in silh to one poly
        poly = shapely.ops.unary_union(npolys)

        poly = poly.buffer(distance / 1.5, 8)
        poly = poly.simplify(0.0003)
        polyfield.append([[0, 0], 0.0, poly, ob, z])
    random.shuffle(polyfield)
    # primitive layout here:
    allpoly = sgeometry.Polygon()  # main collision poly.

    shift = 0.0015  # one millimeter for now.
    rotchange = .3123456  # in radians

    xmin, ymin, xmax, ymax = polyfield[0][2].bounds
    if direction == 'X':
        mindist = -xmin
    else:
        mindist = -ymin
    i = 0
    p = polyfield[0][2]
    placedpolys = []
    rotcenter = sgeometry.Point(0, 0)
    for pf in polyfield:
        print(i)
        rot = 0
        porig = pf[2]
        placed = False
        xmin, ymin, xmax, ymax = p.bounds
        # p.shift(-xmin, -ymin)
        if direction == 'X':
            x = mindist
            y = -ymin
        if direction == 'Y':
            x = -xmin
            y = mindist

        iter = 0
        best = None
        hits = 0
        besthit = None
        while not placed:
            # swap x and y, and add to x
            # print(x, y)
            p = porig

            if rotate:
                # ptrans = srotate(p, rot, 0, 0)
                ptrans = affinity.rotate(p, rot, origin=rotcenter, use_radians=True)
                # ptrans = translate(ptrans, x, y)
                ptrans = affinity.translate(ptrans, x, y)
            else:
                # ptrans = translate(p, x, y)
                ptrans = affinity.translate(p, x, y)
            xmin, ymin, xmax, ymax = ptrans.bounds
            # print(iter, p.bounds)

            if xmin > 0 and ymin > 0 and ((direction == 'Y' and xmax < sheetsizex) or
                                          (direction == 'X' and ymax < sheetsizey)):
                if allpoly.disjoint(ptrans):
                    # print('gothit')
                    # collect several good solutions and choose the best of them:
                    hits += 1
                    if best is None:
                        best = [x, y, rot, xmax, ymax]
                        besthit = hits
                    if direction == 'X':
                        if xmax < best[3]:
                            best = [x, y, rot, xmax, ymax]
                            besthit = hits
                    elif ymax < best[4]:
                        best = [x, y, rot, xmax, ymax]
                        besthit = hits

            if hits >= 15 or (iter > 10000 and hits > 0):
                # here was originally more, but 90% of best solutions are still 1
                placed = True
                pf[3].location.x = best[0]
                pf[3].location.y = best[1]
                pf[3].location.z = pf[4]
                pf[3].rotation_euler.z = best[2]

                pf[3].select = True

                # print(mindist)
                mindist = mindist - 0.5 * (xmax - xmin)
                # print(mindist)
                # print(iter)

                # reset polygon to best position here:
                ptrans = affinity.rotate(porig, best[2], rotcenter, use_radians=True)
                # ptrans = srotate(porig, best[2], 0, 0)
                ptrans = affinity.translate(ptrans, best[0], best[1])
                # ptrans = translate(ptrans, best[0], best[1])

                # polygon_utils_cam.polyToMesh(p, 0.1)  # debug visualisation

                keep = []
                print(best[0], best[1])
                # print(len(ptrans.exterior))
                # npoly = allpoly.union(ptrans)
                '''
                for ci in range(0, len(allpoly)):
                    cminx, cmaxx, cminy, cmaxy = allpoly.boundingBox(ci)
                    if direction == 'X' and cmaxx > mindist - .1:
                        npoly.addContour(allpoly[ci])
                    if direction == 'Y' and cmaxy > mindist - .1:
                        npoly.addContour(allpoly[ci])
                '''
                # allpoly = npoly
                placedpolys.append(ptrans)
                allpoly = prepared.prep(sgeometry.MultiPolygon(placedpolys))
                # polygon_utils_cam.polyToMesh(allpoly, 0.1)  # debug visualisation

                # for c in p:
                #     allpoly.addContour(c)

                # cleanup allpoly
                print(iter, hits, besthit)
            if not placed:
                if direction == 'Y':
                    x += shift
                    mindist = y
                    if (xmax + shift > sheetsizex):
                        x = x - xmin
                        y += shift
                if direction == 'X':
                    y += shift
                    mindist = x
                    if (ymax + shift > sheetsizey):
                        y = y - ymin
                        x += shift
                if rotate:
                    rot += rotchange
            iter += 1
        i += 1
    t = time.time() - t
    polygon_utils_cam.shapelyToCurve('test', sgeometry.MultiPolygon(placedpolys), 0)
    print(t)
def _add_asset(self, idx, asset_node, param):
    values = {}
    deductibles = {}
    insurance_limits = {}
    retrofitted = None
    asset_id = asset_node['id'].encode('utf8')
    with context(param['fname'], asset_node):
        self.asset_refs.append(asset_id)
        taxonomy = asset_node['taxonomy']
        if 'damage' in param['calculation_mode']:
            # calculators of 'damage' kind require the 'number'
            # if it is missing a KeyError is raised
            number = asset_node['number']
        else:
            # some calculators ignore the 'number' attribute;
            # if it is missing it is considered 1, since we are going
            # to multiply by it
            try:
                number = asset_node['number']
            except KeyError:
                number = 1
            else:
                if 'occupants' in self.cost_types['name']:
                    values['occupants_None'] = number
        location = asset_node.location['lon'], asset_node.location['lat']
        if param['region'] and not geometry.Point(*location).within(
                param['region']):
            param['out_of_region'] += 1
            return
    tagnode = getattr(asset_node, 'tags', None)
    dic = {} if tagnode is None else tagnode.attrib.copy()
    with context(param['fname'], tagnode):
        dic['taxonomy'] = taxonomy
        idxs = self.tagcol.add_tags(dic)
    try:
        costs = asset_node.costs
    except AttributeError:
        costs = Node('costs', [])
    try:
        occupancies = asset_node.occupancies
    except AttributeError:
        occupancies = Node('occupancies', [])
    for cost in costs:
        with context(param['fname'], cost):
            cost_type = cost['type']
            if cost_type == 'structural':
                # retrofitted is defined only for structural
                retrofitted = cost.get('retrofitted')
            if cost_type in param['relevant_cost_types']:
                values[cost_type] = cost['value']
                try:
                    deductibles[cost_type] = cost['deductible']
                except KeyError:
                    pass
                try:
                    insurance_limits[cost_type] = cost['insuranceLimit']
                except KeyError:
                    pass

    # check we are not missing a cost type
    missing = param['relevant_cost_types'] - set(values)
    if missing and missing <= param['ignore_missing_costs']:
        logging.warn('Ignoring asset %s, missing cost type(s): %s',
                     asset_id, ', '.join(missing))
        for cost_type in missing:
            values[cost_type] = None
    elif missing and 'damage' not in param['calculation_mode']:
        # missing the costs is okay for damage calculators
        with context(param['fname'], asset_node):
            raise ValueError("Invalid Exposure. "
                             "Missing cost %s for asset %s" % (missing, asset_id))
    tot_occupants = 0
    for occupancy in occupancies:
        with context(param['fname'], occupancy):
            occupants = 'occupants_%s' % occupancy['period']
            values[occupants] = float(occupancy['occupants'])
            tot_occupants += values[occupants]
    if occupancies:  # store average occupants
        values['occupants_None'] = tot_occupants / len(occupancies)
    area = float(asset_node.get('area', 1))
    ass = Asset(idx, idxs, number, location, values, area, deductibles,
                insurance_limits, retrofitted, self.cost_calculator)
    self.assets.append(ass)
def talker():
    global u, VITESSE
    cmd_publisher = rospy.Publisher('cmd_drive', cmd_drive, queue_size=10)
    data_pub = rospy.Publisher('floats', numpy_msg(Floats), queue_size=10)
    rospy.Subscriber("/IMU_F", Imu, ImuCallback_F)
    # rospy.Subscriber("/IMU_R", Imu, ImuCallback_R)
    # rospy.Subscriber("/odom", Odometry, OdomCallback)
    rospy.Subscriber("/GPS/fix", NavSatFix, retour_gps)
    rospy.init_node('SSA', anonymous=True)
    r = rospy.Rate(100)  # 100 Hz
    simul_time = rospy.get_param('~simulation_time', '10')

    # Reference trajectory
    ref = np.loadtxt('/home/summit/Spidoo_ws/src/LMPC/spido_lmpc/scripts/matS.txt')
    ref[:, 1] = ref[:, 1] - ref[0, 1]
    ref[:, 2] = ref[:, 2] - ref[0, 2]
    dpsi = Psi - ref[0, 3]
    ref[:, 3] = ref[:, 3] + dpsi
    RR = np.array([[cos(dpsi), -sin(dpsi), X],
                   [sin(dpsi), cos(dpsi), Y],
                   [0, 0, 1]])
    FF = np.array([np.transpose(ref[:, 1]),
                   np.transpose(ref[:, 2]),
                   np.ones((1, len(ref)))])
    A = RR.dot(FF)
    ref[:, 1:2] = np.transpose(A[0])
    ref[:, 2:3] = np.transpose(A[1])
    line = geom.LineString(ref[:, 1:3])
    vitesse = VITESSE
    Epsi = np.array([[0], [0]])
    Xm = np.array([[0], [0]])
    Psiref_old = Psi
    theta_tilde_old = 0
    ey_old = 0
    DeltaR = 0
    DeltaF = 0
    betaF = 0
    betaR = 0
    y_obs = 0
    theta_tilde_obs = 0
    t0 = rospy.get_time()
    cmd = cmd_drive()
    cmd.linear_speed = vitesse

    while (rospy.get_time() - t0 <= simul_time):
        # subprocess.check_call("rosservice call /gazebo/pause_physics", shell=True)
        point = geom.Point(X, Y)
        nearest_pt = line.interpolate(line.project(point))
        distance, index = spatial.KDTree(ref[:, 1:3]).query(nearest_pt)
        xref = ref[index, 1]
        yref = ref[index, 2]
        Psiref = ref[index, 3]
        c = ref[index, 7]
        vyref = 0

        ### Calculate DCapR
        DCapR = (Psiref - Psiref_old) / Tsamp
        Psiref_old = Psiref

        ### theta_tilde_dot
        ey = -sin(Psiref) * (X - xref) + cos(Psiref) * (Y - yref)  # lateral error
        ey_dot = (ey - ey_old) / Tsamp
        ey_old = ey
        theta_tilde = (Psi - Psiref)  # angular error
        theta_tilde_dot = (theta_tilde - theta_tilde_old) / Tsamp
        theta_tilde_old = theta_tilde

        ### Side-slip angle observers
        # betaF = 0
        # betaR = 0
        # betaF, betaR, Epsi = LinearObsvBFBr(ey, theta_tilde, DeltaF, DeltaR, vitesse, c, Epsi, DCapR, theta_tilde_dot)  # linear observer
        ### Non-linear observer
        # betaF, betaR, Epsi = NLObserverBFBr(ey, theta_tilde, DeltaF, DeltaR, vitesse, c, Epsi, DCapR, theta_tilde_dot)  # NL observer
        ### For Roland 2017
        # fF = KinmaticModel(betaF, betaR, ey, theta_tilde, vitesse, DeltaF, c)
        # y_obs, theta_tilde_obs, Xm, fFB00, B = LinearKinmaticModel(y_obs, theta_tilde_obs, Xm, vitesse, DeltaF, c, betaF, betaR)
        # betaF, betaR = LenainObsvBFBr(ey, theta_tilde, y_obs, theta_tilde_obs, DeltaF, DeltaR, vitesse, c, theta_tilde_dot, ey_dot, fFB00, fF, B)

        #### Linear model EMAD MAHROUS NLObserver_Linear_BFBr
        betaF, betaR, Epsi = NLObserver_Linear_BFBr(ey, theta_tilde, DeltaF, DeltaR,
                                                    vitesse, c, Epsi, DCapR,
                                                    theta_tilde_dot)
        # print('y_obs=', y_obs)
        # print('y=', ey)

        if betaF < -0.087266463:
            betaF = -0.087266463
        if betaR < -0.087266463:
            betaR = -0.087266463
        if betaR > 0.087266463:
            betaR = 0.087266463
        if betaF > 0.087266463:
            betaF = 0.087266463
        aa = betaF * (180 / pi)
        bb = betaR * (180 / pi)

        # y_obs, theta_tilde_obs, Xm = KinmaticModel(betaF, betaR, ey, -theta_tilde, Xm, vitesse, DeltaF, c)
        # betaF, betaR = LenainObsvBFBr(ey, -theta_tilde, y_obs, theta_tilde_obs, DeltaF, DeltaR, vitesse, c, theta_tilde_dot, ey_dot)
        # print('y_obs=\n', y_obs)
        # print('y=\n', ey)
        print('betaF=', aa)
        print('betaR=', bb)

        ### Adaptive controller
        u = deltaFunction(ey, theta_tilde, c, betaF, betaR)
        if abs(u) > 20 * (pi / 180):
            u = 20 * (pi / 180) * sign(u)
        DeltaF = u
        cmd.steering_angle_front = u        # u[0,0]
        cmd.steering_angle_rear = DeltaR    # u[1,0]
        cmd_publisher.publish(cmd)
        posture = np.array([X, Y, Psi, ey, theta_tilde, Psip, u, xref, yref,
                            Psiref, betaF, betaR], dtype=np.float32)
        data_pub.publish(posture)
        # subprocess.check_call("rosservice call /gazebo/unpause_physics", shell=True)
        # unpause = rospy.ServiceProxy('/gazebo/unpause_physics', Empty)
        r.sleep()

    cmd.steering_angle_front = 0  # u[0,0]
    cmd.steering_angle_rear = 0   # u[0,0]
    cmd.linear_speed = 0
    cmd_publisher.publish(cmd)
def region(self, lat, lon):
    """
    Return a region code matching the provided position.

    If the position is not found inside any region return None.
    """
    # Look up point in RTree of buffered region envelopes.
    # This is a coarse-grained but very fast match.
    point = geometry.Point(lon, lat)
    codes = set([self._tree_ids[id_]
                 for id_ in self._tree.intersection(point.bounds)])
    if not codes:
        return None

    # match point against the buffered polygon shapes
    buffered_codes = set([code for code in codes
                          if self._buffered_shapes[code].contains(point)])
    if len(buffered_codes) < 2:
        return tuple(buffered_codes)[0] if buffered_codes else None

    # match point against the precise polygon shapes
    precise_codes = set([code for code in buffered_codes
                         if self._prepared_shapes[code].contains(point)])
    if len(precise_codes) == 1:
        return tuple(precise_codes)[0]

    # Use distance from the border of each region as the tie-breaker.
    distances = {}

    # point wasn't in any precise region, which one of the buffered
    # regions is it closest to?
    if not precise_codes:
        for code in buffered_codes:
            coords = []
            if isinstance(self._shapes[code].boundary,
                          geometry.base.BaseMultipartGeometry):
                for geom in self._shapes[code].boundary.geoms:
                    coords.extend([coord for coord in geom.coords])
            else:
                coords = self._shapes[code].boundary.coords
            for coord in coords:
                distances[geocalc.distance(coord[1], coord[0], lat, lon)] = code
        return distances[min(distances.keys())]

    # point was in multiple overlapping regions, take the one where it
    # is farthest away from the border / the most inside a region
    for code in precise_codes:
        coords = []
        if isinstance(self._shapes[code].boundary,
                      geometry.base.BaseMultipartGeometry):
            for geom in self._shapes[code].boundary.geoms:
                coords.extend([coord for coord in geom.coords])
        else:
            coords = self._shapes[code].boundary.coords
        for coord in coords:
            distances[geocalc.distance(coord[1], coord[0], lat, lon)] = code
    return distances[max(distances.keys())]
def process(context, arguments):
    df_trips, df_primary, random_seed = arguments

    # Set up RNG
    random = np.random.RandomState(context.config("random_seed"))
    maximum_iterations = context.config("secloc_maximum_iterations")

    # Set up discretization solver
    destinations = context.data("destinations")
    candidate_index = CandidateIndex(destinations)
    discretization_solver = CustomDiscretizationSolver(candidate_index)

    # Set up distance sampler
    distance_distributions = context.data("distance_distributions")
    distance_sampler = CustomDistanceSampler(
        maximum_iterations=min(1000, maximum_iterations),
        random=random,
        distributions=distance_distributions)

    # Set up relaxation solver; currently, we do not consider tail problems.
    chain_solver = GravityChainSolver(
        random=random, eps=10.0, lateral_deviation=10.0, alpha=0.1,
        maximum_iterations=min(1000, maximum_iterations))
    tail_solver = AngularTailSolver(random=random)
    free_solver = CustomFreeChainSolver(random, candidate_index)

    relaxation_solver = GeneralRelaxationSolver(chain_solver, tail_solver, free_solver)

    # Set up assignment solver
    thresholds = dict(car=200.0, car_passenger=200.0, pt=200.0,
                      bike=100.0, walk=100.0)
    assignment_objective = DiscretizationErrorObjective(thresholds=thresholds)
    assignment_solver = AssignmentSolver(
        distance_sampler=distance_sampler,
        relaxation_solver=relaxation_solver,
        discretization_solver=discretization_solver,
        objective=assignment_objective,
        maximum_iterations=min(20, maximum_iterations))

    df_locations = []
    df_convergence = []

    last_person_id = None

    for problem in find_assignment_problems(df_trips, df_primary):
        result = assignment_solver.solve(problem)

        starting_activity_index = problem["activity_index"]

        for index, (identifier, location) in enumerate(
                zip(result["discretization"]["identifiers"],
                    result["discretization"]["locations"])):
            df_locations.append((
                problem["person_id"], starting_activity_index + index,
                identifier, geo.Point(location)))

        df_convergence.append((result["valid"], problem["size"]))

        if problem["person_id"] != last_person_id:
            last_person_id = problem["person_id"]
            context.progress.update()

    df_locations = pd.DataFrame.from_records(
        df_locations,
        columns=["person_id", "activity_index", "location_id", "geometry"])
    df_locations = gpd.GeoDataFrame(df_locations, crs="EPSG:2154")
    assert not df_locations["geometry"].isna().any()

    df_convergence = pd.DataFrame.from_records(
        df_convergence, columns=["valid", "size"])

    return df_locations, df_convergence
def add_area_land_sfc(self, maxheight, ls=None, silent=False):
    """
    land use inside an area around the trajectory

    add the shape to ``self.shapes['shp_below{:.1f}km']``
    and the statistics to ``self.stat_ls['occ_shp_below{:.1f}km']``

    sizes of circle (in 24h steps):

    .. code-block:: python

        [0.05, 0.15, 0.4, 0.77666667, 1.25666667, 1.81666667,
         2.43333333, 3.08333333, 3.74333333, 4.39, 5.]

    Args:
        maxheight: maximum height in meters, or 'md' for MIXDEPTH
        ls (:class:`trace_source.land_sfc.land_sfc`, optional): pre-loaded land
            surface information (separate loading consumes a lot of time)
        silent (bool, optional): verbose output or not

    .. deprecated:: 0.1
        use the ensemble trajectories instead
    """
    import shapely.geometry as sgeom
    from shapely.ops import cascaded_union

    if maxheight == 'md':
        maxheight = self.data[1]['MIXDEPTH']
        maxheightstr = 'md'
    else:
        maxheightstr = '{:.1f}'.format(maxheight / 1000.)

    lat = self.data[1]['latitude'][self.data[1]['height'] < maxheight]
    lon = self.data[1]['longitude'][self.data[1]['height'] < maxheight]
    age = self.data[1]['age'][self.data[1]['height'] < maxheight]

    if lat.shape[0] > 0:
        r = np.abs(age)**2 * (5. / 240**2)
        p = np.poly1d([-2.81314300e-07, 1.50462963e-04,
                       7.17592593e-04, 5.00000000e-02])
        r = p(abs(age))
        r[r < 0.05] = 0.05

        # track = sgeom.LineString(zip(lon, lat))
        # track = track.buffer(0.1)
        shape = []
        for coord in zip(lat, lon, r):
            shape.append(sgeom.Point(coord[1], coord[0]).buffer(coord[2]))
        # shape.append(track)
        shape = cascaded_union(shape)
        # check if shape is a Polygon; if yes convert it to a MultiPolygon
        if shape.geom_type == 'Polygon':
            shape = sgeom.MultiPolygon([shape])

        if ls is None:
            ls = trace_source.land_sfc.land_sfc()

        occ_stat = namedtuple('occ_stat', 'no_below counter')
        cat = ls.get_land_sfc_shape(shape).compressed()
        no = float(cat.shape[0]) if cat.shape[0] > 0 else -1
        c = {x: cat.tolist().count(x) / float(no)
             for x in list(ls.categories.keys())}

        key = 'shp_below{}km'.format(maxheightstr)
        self.shapes[key] = shape
        key = 'occ_shp_below{}km'.format(maxheightstr)
        self.stat_ls[key] = occ_stat(no_below=no, counter=c)
        return shape
    else:
        # print('! nothing below ', maxheight)
        print('! nothing below ')
def iter_oriented_line_pt_idx(to_orient: String_or_Ring,
                              path: String_or_Ring,
                              buffer: Optional[float] = None) -> Iterator[int]:
    """Yields a subset of the point indexes of a line based upon the orientation
    of the path.

    Note: if no buffer is provided the line points must be on the path.

    Point indexes are iterated based upon the order in which they are captured
    by the path. A point index is captured and yielded if its associated point:

    1. Is within the buffer distance of the current path segment (default
       buffer is zero)
    2. Has not been captured by a previous segment (the first capturing path
       segment lays claim to each line point)
    3. If a segment captures multiple points: is the next closest point to the
       *first* path segment point

    In the case of a ring-like line, if the repeated ring end point is captured
    then at least one of the immediate neighboring points must also be captured
    (error otherwise).

    Note that if a line orientation turns out to be ambiguous, e.g. the case
    where a path captures only index 0 and index 2 of a 4-point line, the line
    orientation is assumed to be the originally supplied orientation.
    """
    if buffer is None:
        buffer = 0
    else:
        buffer = float(buffer)

    if isinstance(to_orient, (geo.LineString, geo.LinearRing)):
        if not to_orient.is_simple:
            raise GeometryError("simple linear objects are required for orientation")
    else:
        raise GeometryError("a line like geometry object is required")

    # the path will decide the resulting line index order
    coords_to_orient = list(to_orient.coords)
    points_to_orient = geo.MultiPoint(coords_to_orient)
    path_segments = geo.MultiLineString(list(iter_segments(path)))
    if len(path_segments) == 0:
        raise GeometryError("failed to break path into segments")

    # sorting dictionaries
    lpdx_to_sdx_dict: DefaultDict[int, Set[int]] = DefaultDict(set)
    lpdx_to_min_dist_dict: Dict[int, float] = dict()
    sdx_to_lpdx_dict: Dict[int, List[int]] = DefaultDict(list)

    if to_orient.is_closed:
        # handle ring-like line
        repeated_point = points_to_orient[0]
        # get the capturing segment
        min_distance_to_repeated_point = min(
            repeated_point.distance(seg) for seg in path_segments)
        # if the repeated ring end point is NOT captured, ignore; otherwise this
        # needs handling
        if min_distance_to_repeated_point <= buffer:
            # repeated point is captured - needs handling
            p_second, p_second_to_last = points_to_orient[1], points_to_orient[-2]
            # get min distances for second and second-to-last points (they are
            # separate points since the line is simple)
            d_second = min(p_second.distance(seg) for seg in path_segments)
            d_second_to_last = min(
                p_second_to_last.distance(seg) for seg in path_segments)
            # at least one of the two neighboring points to the repeated ring
            # end point must be captured
            captured_dict = {1: d_second <= buffer, -2: d_second_to_last <= buffer}
            if True in captured_dict.values():
                if captured_dict[1] and captured_dict[-2]:
                    # both neighbors captured; discard both ring end points
                    # (they aren't needed)
                    points_to_orient = geo.MultiPoint(points_to_orient[1:-1])
                elif captured_dict[1]:
                    # only pdx==1 captured, discard last line point
                    points_to_orient = geo.MultiPoint(points_to_orient[:-1])
                elif captured_dict[-2]:
                    # only pdx==-2 captured, discard first line point
                    points_to_orient = geo.MultiPoint(points_to_orient[1:])
            else:
                raise GeometryError(
                    "invalid path given to orient the ring-like line; path must "
                    "capture a neighbor of the repeated ring end points")

    # for adjusting final index results later
    try:
        lpdx_adjustment: int = {True: 1, False: 0}[captured_dict[-2]]
    except NameError:
        lpdx_adjustment = 0

    # point capture algorithm
    lpdx: int
    lp: geo.Point
    for (lpdx, lp) in enumerate(points_to_orient):
        seg: geo.LineString
        # match line point indexes with path segment indexes
        for sdx, seg in enumerate(path_segments):
            d: float = lp.distance(seg)
            # associate each line point index with path segment indexes within
            # the buffer
            if d <= lpdx_to_min_dist_dict.get(lpdx, buffer):
                if d < lpdx_to_min_dist_dict.get(lpdx, buffer):
                    # keep only the closest path segment
                    lpdx_to_sdx_dict[lpdx].clear()
                    lpdx_to_min_dist_dict[lpdx] = d
                lpdx_to_sdx_dict[lpdx].add(sdx)  # point may fall on multiple segments
        try:
            # the Jacqueline Rule: the first segment in the series captures the point
            closest_sdx = min(lpdx_to_sdx_dict[lpdx])
        except ValueError:
            # un-captured lp (lp not within buffer for any segments)
            pass
        else:
            # captured lp
            sdx_to_lpdx_dict[closest_sdx].append(lpdx)
    del lpdx, lp

    sdx_to_lpdx_sorted = dict(sorted(sdx_to_lpdx_dict.items()))
    # now have a 1 to 1 relationship for segments and points (but there may be
    # multiple points per segment)
    lpdx_bag: List[int]
    if all(len(lpdx_bag) == 1 for lpdx_bag in sdx_to_lpdx_sorted.values()):
        # ALL 1 to 1
        yield from (lpdx for (lpdx, ) in sdx_to_lpdx_sorted.values())
    else:
        # some not 1 to 1
        sdx: int
        for sdx, lpdx_bag in sdx_to_lpdx_sorted.items():
            if len(lpdx_bag) == 1:
                # 1 to 1
                yield lpdx_bag[0] + lpdx_adjustment
            else:
                # not 1 to 1; iterate based on distance to first segment point
                dist_lpdx_sub_list: List[Tuple[float, int]] = []
                seg_point = geo.Point(path_segments[sdx].coords[0])
                lpdx: int
                for lpdx in lpdx_bag:
                    d = seg_point.distance(points_to_orient[lpdx])
                    dist_lpdx_sub_list.append((d, lpdx))
                dist_lpdx_sub_list_sorted = sorted(dist_lpdx_sub_list)
                yield from (lpdx + lpdx_adjustment
                            for _, lpdx in dist_lpdx_sub_list_sorted)
def sampled_area_fraction(leg_distances,
                          turn_angles,
                          x0=0.0,
                          y0=0.0,
                          radius_of_turn=6.0,
                          satellite_footprint_radius=7.5,
                          sensor_footprint_radius=1.0,
                          true_airspeed=140.0,
                          plot_figure=True):
    # Add footprint
    footprint = spg.Point(0, 0)
    footprint = footprint.buffer(satellite_footprint_radius)

    if plot_figure:
        from matplotlib.patches import Arc
        plt.figure()
        ax = plt.axes()
        plt.plot(*footprint.exterior.xy, color="red", linestyle="--")

    pointing_angle = 0.0
    pos = (x0, y0)
    tracks = []
    for i_leg in range(len(leg_distances)):
        # Straight line segment
        new_pos = end_point(pos, pointing_angle, leg_distances[i_leg])
        if plot_figure:
            plt.plot([pos[0], new_pos[0]], [pos[1], new_pos[1]], color="black")
        line = spg.LineString([pos, new_pos])
        tracks.append(line)
        pos = new_pos

        if i_leg == (len(leg_distances) - 1):
            break

        # Turn
        centre, endpoint = arc_centre_endpoint(pos, pointing_angle,
                                               radius_of_turn, turn_angles[i_leg])
        if plot_figure:
            if turn_angles[i_leg] < 0:
                t1 = 360.0 - pointing_angle
                t2 = t1 + np.abs(turn_angles[i_leg])
                print(pointing_angle, t1, t2)
            else:
                t1 = 180 - (turn_angles[i_leg]) - pointing_angle
                t2 = t1 + (turn_angles[i_leg])
            a = Arc(centre, 2 * radius_of_turn, 2 * radius_of_turn,
                    angle=0.0, theta1=t1, theta2=t2)
            ax.add_artist(a)

        pointing_angle += turn_angles[i_leg]
        pos = endpoint

    distance_in_footprint = 0.0
    sampled_areas = []
    for l in tracks:
        distance_in_footprint += l.intersection(footprint).length
        lbuff = l.buffer(sensor_footprint_radius)
        if plot_figure:
            plt.plot(*lbuff.exterior.xy, color="blue", linestyle="--")
        sampled_areas.append(lbuff.intersection(footprint))

    sampled_area = sops.unary_union(sampled_areas)
    saf = sampled_area.area / footprint.area

    if plot_figure:
        if isinstance(sampled_area, spg.MultiPolygon):
            for poly in sampled_area:
                plt.plot(*poly.exterior.xy, color="green")
        else:
            plt.plot(*sampled_area.exterior.xy, color="green")

        # Set limits
        plt.xlim(-40, 40)
        plt.ylim(-40, 40)
        ax.set_aspect("equal", "box")

    tdist = total_distance(leg_distances, turn_angles, radius_of_turn)
    total_time = tdist * 1000.0 / true_airspeed / 60.0

    if plot_figure:
        print(
            f"sampled distance (km): {distance_in_footprint:.2f}, "
            f"total time (min): {total_time:.1f}, "
            f"sampled area (km^2): {sampled_area.area}, sampled area fraction: {saf}"
        )

    return saf
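# Usage sketch (hypothetical values, not from the original source). It assumes
# the module-level helpers referenced above (end_point, arc_centre_endpoint,
# total_distance) and the aliases spg/sops for shapely.geometry/shapely.ops are
# available: a two-leg racetrack pattern joined by a 180 degree turn, evaluated
# without plotting.
saf = sampled_area_fraction(leg_distances=[20.0, 20.0],
                            turn_angles=[180.0],
                            plot_figure=False)
print(f"sampled area fraction: {saf:.3f}")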
def feature():
    unit_circle = sgeom.Point(0, 0).buffer(0.5)
    unit_square = unit_circle.envelope
    geoms = [unit_circle, unit_square]
    feature = ShapelyFeature(geoms, ccrs.PlateCarree())
    return feature
x0_int, y0_int = (r0s_int(phis_int_list) * np.cos(phis_int_list),
                  r0s_int(phis_int_list) * np.sin(phis_int_list))

gridnum = 100
xm = np.linspace(-scanL / 2, scanL / 2, gridnum)
ym = np.linspace(-scanL / 2, scanL / 2, gridnum)

domain_wall = geom.LineString(np.transpose([x0_int, y0_int]))

grid_dist = np.zeros((gridnum, gridnum))
dist = 0
t1 = time.time()
for j in range(gridnum):
    for i in range(gridnum):
        point = geom.Point(xm[i], ym[j])
        point_on_dw = domain_wall.interpolate(domain_wall.project(point))
        dist = domain_wall.distance(point)
        if (point.x**2 + point.y**2 <
                point_on_dw.x**2 + point_on_dw.y**2):
            dist = -dist
        grid_dist[j, i] = dist
t2 = time.time()
print(t2 - t1)

# BLOCH
dw = 30.0e-9
mz = np.tanh(grid_dist / dw)

# ---------------- PLOTS ------------------------------------------
def test_almost_equals_default(self):
    p1 = geometry.Point(1.0, 1.0)
    p2 = geometry.Point(1.0 + 1e-7, 1.0 + 1e-7)  # almost equal to 6 places
    p3 = geometry.Point(1.0 + 1e-6, 1.0 + 1e-6)  # not almost equal
    self.assertTrue(p1.almost_equals(p2))
    self.assertFalse(p1.almost_equals(p3))
def has_coord(self, lon, lat):
    return self.has_point(geometry.Point(lon, lat))
def test_almost_equals(self):
    p1 = geometry.Point(1.0, 1.0)
    p2 = geometry.Point(1.1, 1.1)
    self.assertFalse(p1.equals(p2))
    self.assertTrue(p1.almost_equals(p2, 0))
    self.assertFalse(p1.almost_equals(p2, 1))
def is_land(x, y):
    return land.contains(sgeom.Point(x, y))
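# Usage sketch. The snippet above assumes a module-level `land` geometry
# (presumably built from coastline polygons); here a toy 2x2 degree "island"
# stands in for it so the call can be tried in isolation.
import shapely.geometry as sgeom

land = sgeom.box(-1.0, -1.0, 1.0, 1.0)
print(is_land(0.5, 0.5))  # True
print(is_land(5.0, 5.0))  # False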
def test_equals_exact(self):
    p1 = geometry.Point(1.0, 1.0)
    p2 = geometry.Point(2.0, 2.0)
    self.assertFalse(p1.equals(p2))
    self.assertFalse(p1.equals_exact(p2, 0.001))
def roiPlot(inifile, CurrentVideo):
    config = ConfigParser()
    config.read(inifile)

    # get dataframe column names
    bplist = getBpHeaders(inifile)
    try:
        noAnimals = config.getint('ROI settings', 'no_of_animals')
    except NoOptionError:
        noAnimals = config.getint('General settings', 'animal_no')
    projectPath = config.get('General settings', 'project_path')
    try:
        wfileType = config.get('General settings', 'workflow_file_type')
    except NoOptionError:
        wfileType = 'csv'
    animalBodypartList = []
    for bp in range(noAnimals):
        animalName = 'animal_' + str(bp + 1) + '_bp'
        animalBpName = config.get('ROI settings', animalName)
        animalBpNameX, animalBpNameY = animalBpName + '_x', animalBpName + '_y'
        animalBodypartList.append([animalBpNameX, animalBpNameY])
    columns2grab = [item[0:2] for item in animalBodypartList]
    columns2grab = [item for sublist in columns2grab for item in sublist]

    try:
        multiAnimalIDList = config.get('Multi animal IDs', 'id_list')
        multiAnimalIDList = multiAnimalIDList.split(",")
        if multiAnimalIDList[0] != '':
            multiAnimalStatus = True
            print('Applying settings for multi-animal tracking...')
        else:
            multiAnimalStatus = False
            for animal in range(noAnimals):
                multiAnimalIDList.append('Animal_' + str(animal + 1) + '_')
            print('Applying settings for classical tracking...')
    except NoSectionError:
        multiAnimalIDList = []
        for animal in range(noAnimals):
            multiAnimalIDList.append('Animal_' + str(animal + 1) + '_')
        multiAnimalStatus = False
        print('Applying settings for classical tracking...')

    logFolderPath = os.path.join(projectPath, 'logs')
    vidInfPath = os.path.join(logFolderPath, 'video_info.csv')
    vidinfDf = pd.read_csv(vidInfPath)
    csv_dir_in = os.path.join(projectPath, 'csv', 'outlier_corrected_movement_location')
    frames_dir_out = os.path.join(projectPath, 'frames', 'output', 'ROI_analysis')
    if not os.path.exists(frames_dir_out):
        os.makedirs(frames_dir_out)
    ROIcoordinatesPath = os.path.join(logFolderPath, 'measures', 'ROI_definitions.h5')
    try:
        rectanglesInfo = pd.read_hdf(ROIcoordinatesPath, key='rectangles')
    except FileNotFoundError:
        print('No ROIs found: please define ROIs using the left-most menu in the SIMBA ROI tab.')
    circleInfo = pd.read_hdf(ROIcoordinatesPath, key='circleDf')
    polygonInfo = pd.read_hdf(ROIcoordinatesPath, key='polygons')
    CurrentVideoPath = os.path.join(projectPath, 'videos', CurrentVideo)
    cap = cv2.VideoCapture(CurrentVideoPath)
    CurrentVideoName, videoFileType = os.path.splitext(CurrentVideo)[0], os.path.splitext(CurrentVideo)[1]
    fps = cap.get(cv2.CAP_PROP_FPS)
    width, height, frames = (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)),
                             int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)),
                             int(cap.get(cv2.CAP_PROP_FRAME_COUNT)))
    mySpaceScale, myRadius, myResolution, myFontScale = 25, 10, 1500, 0.8
    maxResDimension = max(width, height)
    DrawScale = int(myRadius / (myResolution / maxResDimension))
    textScale = float(myFontScale / (myResolution / maxResDimension))
    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    videoSettings = vidinfDf.loc[vidinfDf['Video'] == str(CurrentVideoName)]
    currFps = int(videoSettings['fps'])
    noRectangles = len(rectanglesInfo.loc[rectanglesInfo['Video'] == str(CurrentVideoName)])
    noCircles = len(circleInfo.loc[circleInfo['Video'] == str(CurrentVideoName)])
    noPolygons = len(polygonInfo.loc[polygonInfo['Video'] == str(CurrentVideoName)])
    rectangleTimes, rectangleEntries = ([[0] * len(animalBodypartList) for i in range(noRectangles)],
                                        [[0] * len(animalBodypartList) for i in range(noRectangles)])
    circleTimes, circleEntries = ([[0] * len(animalBodypartList) for i in range(noCircles)],
                                  [[0] * len(animalBodypartList) for i in range(noCircles)])
    polygonTime, polyGonEntries = ([[0] * len(animalBodypartList) for i in range(noPolygons)],
                                   [[0] * len(animalBodypartList) for i in range(noPolygons)])
    currFrameFolderOut = os.path.join(frames_dir_out, CurrentVideoName + '.avi')
    Rectangles = (rectanglesInfo.loc[rectanglesInfo['Video'] == str(CurrentVideoName)])
    Circles = (circleInfo.loc[circleInfo['Video'] == str(CurrentVideoName)])
    Polygons = (polygonInfo.loc[polygonInfo['Video'] == str(CurrentVideoName)])
    rectangleEntryCheck = [[True] * len(animalBodypartList) for i in range(noRectangles)]
    circleEntryCheck = [[True] * len(animalBodypartList) for i in range(noCircles)]
    polygonEntryCheck = [[True] * len(animalBodypartList) for i in range(noPolygons)]
    currDfPath = os.path.join(csv_dir_in, CurrentVideoName + '.' + wfileType)
    currDf = read_df(currDfPath, wfileType)
    try:
        currDf = currDf.set_index('scorer')
    except KeyError:
        pass
    currDf = currDf.loc[:, ~currDf.columns.str.contains('^Unnamed')]
    currDf.columns = bplist
    currDf = currDf[columns2grab]
    writer = cv2.VideoWriter(currFrameFolderOut, fourcc, fps, (width * 2, height))
    RectangleColors = [(255, 191, 0), (255, 248, 240), (255, 144, 30), (230, 224, 176),
                       (160, 158, 95), (208, 224, 63), (240, 207, 137), (245, 147, 245),
                       (204, 142, 0), (229, 223, 176), (208, 216, 129)]
    CircleColors = [(122, 160, 255), (0, 69, 255), (34, 34, 178), (0, 0, 255),
                    (128, 128, 240), (2, 56, 121), (21, 113, 239), (5, 150, 235),
                    (2, 106, 253), (0, 191, 255), (98, 152, 247)]
    polygonColor = [(0, 255, 0), (87, 139, 46), (152, 241, 152), (127, 255, 0),
                    (47, 107, 85), (91, 154, 91), (70, 234, 199), (20, 255, 57),
                    (135, 171, 41), (192, 240, 208), (131, 193, 157)]
    animalColors = []
    cmap = cm.get_cmap('Set1', noAnimals)
    for i in range(cmap.N):
        rgb = list((cmap(i)[:3]))
        rgb = [i * 255 for i in rgb]
        rgb.reverse()
        animalColors.append(rgb)
    currRow = 0
    currentPoints = np.empty((noAnimals, 2), dtype=int)

    while (cap.isOpened()):
        ret, img = cap.read()
        if ret == True:
            addSpacer = 2
            spacingScale = int(mySpaceScale / (myResolution / maxResDimension))
            borderImage = cv2.copyMakeBorder(img, 0, 0, 0, int(width),
                                             borderType=cv2.BORDER_CONSTANT, value=[0, 0, 0])
            borderImageHeight, borderImageWidth = borderImage.shape[0], borderImage.shape[1]
            for animal in range(len(currentPoints)):
                currentPoints[animal][0], currentPoints[animal][1] = (
                    currDf.at[currRow, animalBodypartList[animal][0]],
                    currDf.at[currRow, animalBodypartList[animal][1]])
                cv2.circle(borderImage, (currentPoints[animal][0], currentPoints[animal][1]),
                           DrawScale, animalColors[animal], -1)
                cv2.putText(borderImage, str(multiAnimalIDList[animal]),
                            (currentPoints[animal][0], currentPoints[animal][1]),
                            cv2.FONT_HERSHEY_TRIPLEX, textScale, animalColors[animal], 2)
            addSpacer += 1
            for rectangle in range(noRectangles):
                topLeftX, topLeftY = (Rectangles['topLeftX'].iloc[rectangle],
                                      Rectangles['topLeftY'].iloc[rectangle])
                bottomRightX, bottomRightY = (topLeftX + Rectangles['width'].iloc[rectangle],
                                              topLeftY + Rectangles['height'].iloc[rectangle])
                rectangleName = Rectangles['Name'].iloc[rectangle]
                cv2.rectangle(borderImage, (topLeftX, topLeftY), (bottomRightX, bottomRightY),
                              RectangleColors[rectangle], DrawScale)
                for animal in range(len(currentPoints)):
                    cv2.putText(borderImage,
                                str(rectangleName) + ' ' + str(multiAnimalIDList[animal]) + ' timer:',
                                ((width + 5), (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, RectangleColors[rectangle], 2)
                    if (((topLeftX - 10) <= currentPoints[animal][0] <= (bottomRightX + 10)) and
                            ((topLeftY - 10) <= currentPoints[animal][1] <= (bottomRightY + 10))):
                        rectangleTimes[rectangle][animal] = round(
                            (rectangleTimes[rectangle][animal] + (1 / currFps)), 2)
                        if rectangleEntryCheck[rectangle][animal] == True:
                            rectangleEntries[rectangle][animal] += 1
                            rectangleEntryCheck[rectangle][animal] = False
                    else:
                        rectangleEntryCheck[rectangle][animal] = True
                    cv2.putText(borderImage, str(rectangleTimes[rectangle][animal]),
                                ((int(borderImageWidth - (borderImageWidth / 8))),
                                 (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, RectangleColors[rectangle], 2)
                    addSpacer += 1
                    cv2.putText(borderImage,
                                str(rectangleName) + ' ' + str(multiAnimalIDList[animal]) + ' entries:',
                                ((width + 5), (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, RectangleColors[rectangle], 2)
                    cv2.putText(borderImage, str(rectangleEntries[rectangle][animal]),
                                ((int(borderImageWidth - (borderImageWidth / 8))),
                                 (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, RectangleColors[rectangle], 2)
                    addSpacer += 1
            for circle in range(noCircles):
                circleName, centerX, centerY, radius = (Circles['Name'].iloc[circle],
                                                        Circles['centerX'].iloc[circle],
                                                        Circles['centerY'].iloc[circle],
                                                        Circles['radius'].iloc[circle])
                cv2.circle(borderImage, (centerX, centerY), radius, CircleColors[circle], DrawScale)
                for animal in range(len(currentPoints)):
                    cv2.putText(borderImage,
                                str(circleName) + ' ' + str(multiAnimalIDList[animal]) + ' timer:',
                                ((width + 5), (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, CircleColors[circle], 2)
                    euclidPxDistance = int(np.sqrt((currentPoints[animal][0] - centerX) ** 2 +
                                                   (currentPoints[animal][1] - centerY) ** 2))
                    if euclidPxDistance <= radius:
                        circleTimes[circle][animal] = round(
                            (circleTimes[circle][animal] + (1 / currFps)), 2)
                        if circleEntryCheck[circle][animal] == True:
                            circleEntries[circle][animal] += 1
                            circleEntryCheck[circle][animal] = False
                    else:
                        circleEntryCheck[circle][animal] = True
                    cv2.putText(borderImage, str(circleTimes[circle][animal]),
                                ((int(borderImageWidth - (borderImageWidth / 8))),
                                 (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, CircleColors[circle], 2)
                    addSpacer += 1
                    cv2.putText(borderImage,
                                str(circleName) + ' ' + str(multiAnimalIDList[animal]) + ' entries:',
                                ((width + 5), (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, CircleColors[circle], 2)
                    cv2.putText(borderImage, str(circleEntries[circle][animal]),
                                ((int(borderImageWidth - (borderImageWidth / 8))),
                                 (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, CircleColors[circle], 2)
                    addSpacer += 1
            for polygon in range(noPolygons):
                PolygonName, vertices = (Polygons['Name'].iloc[polygon],
                                         Polygons['vertices'].iloc[polygon])
                vertices = np.array(vertices, np.int32)
                cv2.polylines(borderImage, [vertices], True, polygonColor[polygon], thickness=DrawScale)
                for animal in range(len(currentPoints)):
                    pointList = []
                    cv2.putText(borderImage,
                                str(PolygonName) + ' ' + str(multiAnimalIDList[animal]) + ' timer:',
                                ((width + 5), (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, polygonColor[polygon], 2)
                    for i in vertices:
                        point = geometry.Point(i)
                        pointList.append(point)
                    polyGon = geometry.Polygon([[p.x, p.y] for p in pointList])
                    CurrPoint = Point(int(currentPoints[animal][0]), int(currentPoints[animal][1]))
                    polyGonStatus = (polyGon.contains(CurrPoint))
                    if polyGonStatus == True:
                        polygonTime[polygon][animal] = round(
                            (polygonTime[polygon][animal] + (1 / currFps)), 2)
                        if polygonEntryCheck[polygon][animal] == True:
                            polyGonEntries[polygon][animal] += 1
                            polygonEntryCheck[polygon][animal] = False
                    else:
                        polygonEntryCheck[polygon][animal] = True
                    cv2.putText(borderImage, str(polygonTime[polygon][animal]),
                                ((int(borderImageWidth - (borderImageWidth / 8))),
                                 (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, polygonColor[polygon], 2)
                    addSpacer += 1
                    cv2.putText(borderImage,
                                str(PolygonName) + ' ' + str(multiAnimalIDList[animal]) + ' entries:',
                                ((width + 5), (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, polygonColor[polygon], 2)
                    cv2.putText(borderImage, str(polyGonEntries[polygon][animal]),
                                ((int(borderImageWidth - (borderImageWidth / 8))),
                                 (height - (height + 10) + spacingScale * addSpacer)),
                                cv2.FONT_HERSHEY_TRIPLEX, textScale, polygonColor[polygon], 2)
                    addSpacer += 1
            borderImage = np.uint8(borderImage)
            writer.write(borderImage)
            # cv2.imshow('Window', borderImage)
            # key = cv2.waitKey(3000)
            # if key == 27:
            #     cv2.destroyAllWindows()
            #     break
            currRow += 1
            print('Frame: ' + str(currRow) + '/' + str(frames))
        if img is None:
            print('Video ' + str(CurrentVideoName) + ' saved.')
            cap.release()
            break

    print('ROI videos generated in "project_folder/frames/ROI_analysis"')
# name = sfneigh['features'][0]['properties']['neighborhood']
# ID = sfneigh['features'][0]['properties']['id']
for i in range(len(shape_json)):
    if len(shape_json[i]) == 1:
        shape_json[i] = shape_json[i][0]
poly_sf = []
for j in shape_json:
    poly_sf.append(geom.Polygon([(i[0], i[1]) for i in j]))
first = gpd.GeoDataFrame(neighbor_sf_df, geometry=poly_sf, crs={'init': 'epsg:4326'})

lonlat = [geom.Point(lon, lat)
          for lon, lat in zip(alldata.longitude, alldata.latitude)]
sf = gpd.GeoDataFrame(alldata, geometry=lonlat, crs={'init': 'epsg:4326'})

f1 = lambda x: any([x.within(i) for i in first.geometry])
sf_neighbor = map(f1, sf.geometry)
sf_neighbor_df1 = sf.ix[sf_neighbor, :].reset_index()

f2 = lambda x: [x.within(i) for i in first.geometry].index(True)
neigh_range = map(f2, sf_neighbor_df1.geometry)
id_neigh = [first.ix[i, 'id'] for i in neigh_range]
sf_neighbor_df1['id_neigh'] = id_neigh

sf_neighbor_expensive = sf_neighbor_df1.ix[
    [i in [3, 4] for i in sf_neighbor_df1.price], :]
restrant_expensive = sf_neighbor_expensive['id_neigh'].value_counts()
restrant_num = sf_neighbor_df1['id_neigh'].value_counts()
def test_empty_point(self):
    self.assertTrue(sgeom.Point().is_empty)
def _GetPoint(point):
    """Gets a Point from a placemark."""
    coord = point.coordinates.text.strip()
    return sgeo.Point(_SplitCoordinates(coord))
    # import matplotlib as mpl
    # mpl.use('Agg')
    # from matplotlib import pyplot as plt
    # plt.imshow(matrix[0].T, origin='lower')
    # plt.savefig('oi.png')
    # import IPython
    # IPython.embed()
    if report_to is not None:
        report_to.write('\n')

    return matrix


if __name__ == '__main__':
    with open(os.path.join("SimpleFunciona", "random-line.object")) as infile:
        obj = objects.ObjectFile.from_file(infile)

    polygon_list = []
    for car in obj['car in line']:
        for sub_structure in car:
            polygon_list.append(sub_structure.as_polygon())

    point = geometry.Point((1, 1))
    # print(point.within(all_polygon))

    matrix = calc_position_matrix((633, 456, 663, 531), polygon_list)
    print(matrix[0])
    matrix_plot(matrix[1])
def sensor_plane_inter(sensor_slope, center_cor):
    slope_ = sensor_slope * np.pi / 180
    fov_ = fov * np.pi / 180
    # Line through the camera centre and parallel to the pixel plane: y = kx + d_
    # Line of the pixel plane itself: y = kx + C_
    d_ = center_cor[1] - slope_ * center_cor[0]
    if math.pi / 2 < slope_ < math.pi:
        C_ = d_ - focal_length * np.sqrt(slope_**2 + 1)
    else:
        C_ = d_ + focal_length * np.sqrt(slope_**2 + 1)

    # Intersection of the right FOV boundary with the sensor plane:
    # (x_intersection_right, y_intersection_right)
    k_right = np.tan(-(np.pi - fov_) / 2 + slope_)
    b_right = center_cor[1] - k_right * center_cor[0]
    x_intersection_right = (C_ - b_right) / (-slope_ + k_right)
    y_intersection_right = slope_ * x_intersection_right + C_

    # Intersection of the left FOV boundary with the sensor plane:
    # (x_intersection_left, y_intersection_left)
    k_left = np.tan(-(np.pi - fov_) / 2 - fov_ + slope_)
    b_left = center_cor[1] - k_left * center_cor[0]
    x_intersection_left = (C_ - b_left) / (-slope_ + k_left)
    y_intersection_left = slope_ * x_intersection_left + C_

    # Intersection of the ray (line through the optical centre and the origin)
    # with the sensor plane
    ray_slope = center_cor[1] / center_cor[0]
    ray_inter_x = C_ / (ray_slope - slope_)
    ray_inter_y = ray_slope * ray_inter_x

    # Left and right end points of the pixel that the ray falls on
    x_left = ((ray_inter_x - x_intersection_left) // pixel_size) * pixel_size + x_intersection_left
    y_left = slope_ * x_left + C_
    x_right = ((ray_inter_x - x_intersection_left) // pixel_size + 1) * pixel_size + x_intersection_left
    y_right = slope_ * x_right + C_
    print((y_intersection_right - y_intersection_left) /
          (x_intersection_right - x_intersection_left),
          slope_,
          (y_right - y_left) / (x_right - x_left))
    inter_cor_set = [(x_left, y_left), (x_right, y_right)]
    # print(((ray_inter_x - x_intersection_left) // pixel_size))
    # print(x_left, ray_inter_x, x_right)
    # print([(x_left, y_left), (x_right, y_right)])

    sensor_line = sp.LineString([(x_intersection_right, y_intersection_right),
                                 (x_intersection_left, y_intersection_left)])
    inter_circle = sp.Point(ray_inter_x, ray_inter_y).buffer(pixel_size / 2)
    inter_region = inter_circle.intersection(sensor_line)
    inter_cor_set_2 = list(inter_region.coords)
    print(inter_cor_set)
    print(inter_cor_set_2)
    print(np.sqrt((inter_cor_set[0][0] - inter_cor_set[1][0])**2 +
                  (inter_cor_set[0][1] - inter_cor_set[1][1])**2))
    print(np.sqrt((inter_cor_set_2[0][0] - inter_cor_set_2[1][0])**2 +
                  (inter_cor_set_2[0][1] - inter_cor_set_2[1][1])**2))
    print(pixel_size)
    return inter_cor_set
def test(self):
    'Run tests'
    print 'Save and load a SHP file without attributes'
    path = self.getPath('.shp')
    geometry_store.save(path, geometry_store.proj4LL, shapelyGeometries)
    result = geometry_store.load(path)
    self.assertEqual(result[0].strip(), geometry_store.proj4LL)
    self.assertEqual(len(result[1]), len(shapelyGeometries))

    print 'Save and load a SHP file with attributes'
    path = self.getPath('.shp')
    geometry_store.save(path, geometry_store.proj4LL, shapelyGeometries,
                        fieldPacks, fieldDefinitions)
    result = geometry_store.load(path)
    self.assertEqual(len(result[2]), len(fieldPacks))
    for shapelyGeometry, fieldPack in itertools.izip(result[1], result[2]):
        print
        for fieldValue, (fieldName, fieldType) in itertools.izip(fieldPack, result[3]):
            print '%s = %s' % (fieldName, fieldValue)
        print shapelyGeometry

    print 'Save a SHP file with attributes with different targetProj4'
    path = self.getPath('.shp')
    geometry_store.save(path, geometry_store.proj4LL, shapelyGeometries,
                        fieldPacks, fieldDefinitions,
                        targetProj4=geometry_store.proj4SM)
    result = geometry_store.load(path)
    self.assertNotEqual(result[0].strip(), geometry_store.proj4LL)

    print 'Load a SHP file with attributes with different targetProj4'
    path = self.getPath('.shp')
    geometry_store.save(path, geometry_store.proj4LL, shapelyGeometries,
                        fieldPacks, fieldDefinitions)
    result = geometry_store.load(path, targetProj4=geometry_store.proj4SM)
    self.assertNotEqual(result[0].strip(), geometry_store.proj4LL)

    print 'Save and load a ZIP file without attributes using save'
    path = self.getPath('.shp.zip')
    geometry_store.save(path, geometry_store.proj4LL, shapelyGeometries)
    result = geometry_store.load(path)
    self.assertEqual(result[0].strip(), geometry_store.proj4LL)
    self.assertEqual(len(result[1]), len(shapelyGeometries))

    print 'Save and load a ZIP file with attributes using save'
    path = self.getPath('.shp.zip')
    geometry_store.save(path, geometry_store.proj4LL, shapelyGeometries,
                        fieldPacks, fieldDefinitions)
    result = geometry_store.load(path)
    self.assertEqual(len(result[2]), len(fieldPacks))

    print 'Test saving and loading ZIP files of point coordinates'
    path = self.getPath('.shp.zip')
    geometry_store.save_points(path, geometry_store.proj4LL, [(0, 0)],
                               fieldPacks, fieldDefinitions)
    result = geometry_store.load_points(path)
    self.assertEqual(result[1], [(0, 0)])

    print 'Test get_transform_point'
    transform_point0 = geometry_store.get_transform_point(
        geometry_store.proj4LL, geometry_store.proj4LL)
    transform_point1 = geometry_store.get_transform_point(
        geometry_store.proj4LL, geometry_store.proj4SM)
    self.assertNotEqual(transform_point0(0, 0), transform_point1(0, 0))

    print 'Test get_transform_geometry'
    transform_geometry = geometry_store.get_transform_geometry(
        geometry_store.proj4LL, geometry_store.proj4SM)
    self.assertEqual(type(transform_geometry(geometry.Point(0, 0))),
                     type(geometry.Point(0, 0)))
    self.assertEqual(type(transform_geometry(ogr.CreateGeometryFromWkt('POINT (0 0)'))),
                     type(ogr.CreateGeometryFromWkt('POINT (0 0)')))

    print 'Test get_coordinateTransformation'
    geometry_store.get_coordinateTransformation(geometry_store.proj4LL,
                                                geometry_store.proj4SM)

    print 'Test get_spatialReference'
    geometry_store.get_spatialReference(geometry_store.proj4LL)
    with self.assertRaises(geometry_store.GeometryError):
        geometry_store.get_spatialReference('')

    print 'Test get_geometryType'
    geometry_store.get_geometryType(shapelyGeometries)

    print 'Test save() when a fieldPack has fewer fields than definitions'
    with self.assertRaises(geometry_store.GeometryError):
        path = self.getPath('.shp')
        geometry_store.save(path, geometry_store.proj4LL, shapelyGeometries,
                            [x[1:] for x in fieldPacks], fieldDefinitions)

    print 'Test save() when a fieldPack has more fields than definitions'
    with self.assertRaises(geometry_store.GeometryError):
        path = self.getPath('.shp')
        geometry_store.save(path, geometry_store.proj4LL, shapelyGeometries,
                            [x * 2 for x in fieldPacks], fieldDefinitions)

    print 'Test save() when the driverName is unrecognized'
    with self.assertRaises(geometry_store.GeometryError):
        path = self.getPath('.shp')
        geometry_store.save(path, geometry_store.proj4LL, shapelyGeometries,
                            driverName='')

    print 'Test load() when format is unrecognized'
    with self.assertRaises(geometry_store.GeometryError):
        path = self.getPath('')
        geometry_store.load(path)
def test_almost_equals_default(self):
    p1 = geometry.Point(1.0, 1.0)
    p2 = geometry.Point(1.0 + 1e-7, 1.0 + 1e-7)  # almost equal to 6 places
    p3 = geometry.Point(1.0 + 1e-6, 1.0 + 1e-6)  # not almost equal
    self.failUnless(p1.almost_equals(p2))
    self.failIf(p1.almost_equals(p3))
def path_to_geos(path, force_ccw=False):
    """
    Create a list of Shapely geometric objects from a
    :class:`matplotlib.path.Path`.

    Parameters
    ----------
    path
        A :class:`matplotlib.path.Path` instance.

    Other Parameters
    ----------------
    force_ccw
        Boolean flag determining whether the path can be inverted to enforce
        ccw. Defaults to False.

    Returns
    -------
    A list of instances of the following type(s):
        :class:`shapely.geometry.polygon.Polygon`,
        :class:`shapely.geometry.linestring.LineString` and/or
        :class:`shapely.geometry.multilinestring.MultiLineString`.

    """
    # Convert path into numpy array of vertices (and associated codes)
    path_verts, path_codes = path_segments(path, curves=False)

    # Split into subarrays such that each subarray consists of connected
    # line segments based on the start of each one being marked by a
    # matplotlib MOVETO code.
    verts_split_inds = np.where(path_codes == Path.MOVETO)[0]
    verts_split = np.split(path_verts, verts_split_inds)
    codes_split = np.split(path_codes, verts_split_inds)

    # Iterate through the vertices generating a list of
    # (external_geom, [internal_polygons]) tuples.
    other_result_geoms = []
    collection = []
    for path_verts, path_codes in zip(verts_split, codes_split):
        if len(path_verts) == 0:
            continue

        if path_codes[-1] == Path.CLOSEPOLY:
            path_verts[-1, :] = path_verts[0, :]

        verts_same_as_first = np.isclose(path_verts[0, :], path_verts[1:, :],
                                         rtol=1e-10, atol=1e-13)
        verts_same_as_first = np.logical_and.reduce(verts_same_as_first, axis=1)

        if all(verts_same_as_first):
            geom = sgeom.Point(path_verts[0, :])
        elif path_verts.shape[0] > 4 and path_codes[-1] == Path.CLOSEPOLY:
            geom = sgeom.Polygon(path_verts[:-1, :])
        elif (matplotlib.__version__ < '2.2.0' and
                # XXX A path can be given which does not end with close poly,
                # in that situation, we have to guess?
                path_verts.shape[0] > 3 and verts_same_as_first[-1]):
            geom = sgeom.Polygon(path_verts)
        else:
            geom = sgeom.LineString(path_verts)

        # If geom is a Polygon and is contained within the last geom in
        # collection, add it to its list of internal polygons, otherwise
        # simply append it as a new external geom.
        if geom.is_empty:
            pass
        elif (len(collection) > 0 and
                isinstance(collection[-1][0], sgeom.Polygon) and
                isinstance(geom, sgeom.Polygon) and
                collection[-1][0].contains(geom.exterior)):
            collection[-1][1].append(geom.exterior)
        elif isinstance(geom, sgeom.Point):
            other_result_geoms.append(geom)
        else:
            collection.append((geom, []))

    # Convert each (external_geom, [internal_polygons]) pair into a
    # shapely Polygon that encapsulates the internal polygons; if the
    # external geom is a LineString leave it alone.
    geom_collection = []
    for external_geom, internal_polys in collection:
        if internal_polys:
            # XXX worry about islands within lakes
            geom = sgeom.Polygon(external_geom.exterior, internal_polys)
        else:
            geom = external_geom

        # Correctly orientate the polygon (ccw)
        if isinstance(geom, sgeom.Polygon):
            if force_ccw and not geom.exterior.is_ccw:
                geom = sgeom.polygon.orient(geom)

        geom_collection.append(geom)

    # If the geom_collection only contains LineStrings combine them
    # into a single MultiLineString.
    if geom_collection and all(isinstance(geom, sgeom.LineString)
                               for geom in geom_collection):
        geom_collection = [sgeom.MultiLineString(geom_collection)]

    # Remove any zero area Polygons
    def not_zero_poly(geom):
        return ((isinstance(geom, sgeom.Polygon) and not geom._is_empty and
                 geom.area != 0) or
                not isinstance(geom, sgeom.Polygon))

    result = list(filter(not_zero_poly, geom_collection))

    return result + other_result_geoms
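# A hypothetical round trip (not from the original source), assuming the
# module-level names the function relies on (Path, np, sgeom, matplotlib,
# path_segments) are imported as in the surrounding module: a matplotlib
# rectangle path is converted into a Shapely polygon.
import matplotlib.path as mpath

square = mpath.Path([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)],
                    [mpath.Path.MOVETO, mpath.Path.LINETO, mpath.Path.LINETO,
                     mpath.Path.LINETO, mpath.Path.CLOSEPOLY])
geoms = path_to_geos(square)
print([g.geom_type for g in geoms], geoms[0].area)  # ['Polygon'] 1.0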