def get_hexagon_points(self, num_points, area):
    # side length of a regular hexagon with the given area: A = (3*sqrt(3)/2) * s**2
    sidelength = numpy.sqrt(area * 2 / (3 * numpy.sqrt(3)))
    height = 2 * sidelength
    width = numpy.sqrt(3) * sidelength
    xmin = -width / 2
    xmax = width / 2
    ymin = -height / 2
    ymax = height / 2
    hexagon = Hexagon(sidelength, width, height)
    xpoints = numpy.zeros(num_points)
    ypoints = numpy.zeros(num_points)
    count = 0
    # rejection sampling: draw from the bounding box, keep points inside the hexagon
    while count < num_points:
        xpt = xmin + (xmax - xmin) * numpy.random.rand()
        ypt = ymin + (ymax - ymin) * numpy.random.rand()
        pt = Point((xpt, ypt))
        if pt.within(hexagon.get_polygon()):
            xpoints[count] = xpt
            ypoints[count] = ypt
            count = count + 1
        else:
            continue
    # scale coordinates by the bounding-box dimensions
    xpoints = xpoints / width
    ypoints = ypoints / height
    return xpoints, ypoints
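# Example (added): a minimal standalone sketch of the same rejection-sampling
# idea with plain shapely, since the Hexagon class above is not defined here.
# sample_hexagon is a hypothetical name; the vertex math assumes a regular
# hexagon with area A = (3 * sqrt(3) / 2) * s**2, i.e. s = sqrt(2A / (3*sqrt(3))).
import numpy
from shapely.geometry import Point, Polygon

def sample_hexagon(num_points, area):
    s = numpy.sqrt(area * 2 / (3 * numpy.sqrt(3)))
    # vertices of a regular hexagon centered at the origin (width sqrt(3)*s, height 2*s)
    angles = numpy.deg2rad(60 * numpy.arange(6) + 30)
    hexagon = Polygon(list(zip(s * numpy.cos(angles), s * numpy.sin(angles))))
    xs, ys = [], []
    minx, miny, maxx, maxy = hexagon.bounds
    while len(xs) < num_points:
        x, y = numpy.random.uniform(minx, maxx), numpy.random.uniform(miny, maxy)
        if Point(x, y).within(hexagon):
            xs.append(x)
            ys.append(y)
    return numpy.array(xs), numpy.array(ys)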
def GetIntersectionDistance(self, l1, l2):
    # l1 should be just two coordinate positions
    # get its starting coordinate
    pt1 = Point(l1.coords[0])
    x = l1.intersection(l2)  # intersections can return a lot of things
    d = -1
    if x.wkt == 'GEOMETRYCOLLECTION EMPTY':
        d = -1
        # print "nothing"
    elif re.match('^POINT', x.wkt):
        # print "point"
        pt2 = Point(x.coords[0])
        d = pt1.distance(pt2)
    elif re.match('^MULTI', x.wkt):
        # print "mpoint"
        # this will return the minimum distance
        pt2 = Point(x[0].coords[0])
        d = pt1.distance(pt2)
        for pt2 in x:
            pt2 = Point(pt2)
            if pt1.distance(pt2) < d:
                d = pt1.distance(pt2)
    else:
        print('dunno what intersection passed me')
    return d
def is_at_goal(self):
    """Check if the robot is near its goal in both distance and rotation.

    Once the robot is near its goal, it rotates towards the final goal
    pose. Once it reaches this goal pose (within some predetermined
    rotation allowance), it is deemed to be at its goal.

    Returns
    -------
    bool
        True if the robot is at its goal, False otherwise.
    """
    goal_pt = Point(self.goal_pose[0:2])
    approximation_circle = goal_pt.buffer(self.distance_allowance)
    pose_pt = Point(self.robot.pose2D.pose[0:2])
    position_bool = approximation_circle.contains(pose_pt)
    logging.debug('Position is {}'.format(position_bool))

    degrees_to_goal = abs(self.robot.pose2D.pose[2] - self.goal_pose[2])
    orientation_bool = (degrees_to_goal < self.rotation_allowance)

    # <>TODO: Replace with ros goal message check
    if self.robot.publish_to_ROS is True:
        orientation_bool = True

    logging.debug('Orientation is {}'.format(orientation_bool))
    logging.debug('is_at_goal = {}'.format((position_bool and orientation_bool)))
    return position_bool and orientation_bool
def find_dangles(lines):
    """
    Locate all dangles
    :param lines: list of Shapely LineStrings or MultiLineStrings
    :return: list of dangles
    """
    list_dangles = []
    for i, line in enumerate(lines):
        # each line gets a number
        # go through each line added first to second
        # then second to third and so on
        shply_lines = lines[:i] + lines[i + 1:]
        # 0 is start point and -1 is end point
        # run through
        for start_end in [0, -1]:
            # convert line to point
            node = Point(line.coords[start_end])
            # Return True if any element of the iterable is true.
            # https://docs.python.org/2/library/functions.html#any
            if any(node.touches(next_line) for next_line in shply_lines):
                continue
            else:
                list_dangles.append(node)
    return list_dangles
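# Example (added): a small usage sketch of find_dangles above. Endpoints that
# touch another line are not dangles; the two free endpoints, (0, 0) and
# (2, 2), are.
from shapely.geometry import LineString

lines = [LineString([(0, 0), (1, 0)]),
         LineString([(1, 0), (1, 1)]),
         LineString([(1, 1), (2, 2)])]
print([(d.x, d.y) for d in find_dangles(lines)])  # [(0.0, 0.0), (2.0, 2.0)]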
def getNosePoints(line, prevPoint):
    start = Point(line.coords[0])
    end = Point(line.coords[-1])
    if start.distance(prevPoint) < end.distance(prevPoint):
        return start
    else:
        return end
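# Example (added): usage sketch of getNosePoints; it returns whichever
# endpoint of the line is closer to the previous point.
from shapely.geometry import LineString, Point

nose = getNosePoints(LineString([(0, 0), (10, 0)]), Point(1, 1))
print(nose.x, nose.y)  # 0.0 0.0 - the start is nearer to (1, 1)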
def _populate_shapes(self):
    """ Set values for self._label_shapes, _footprint_shape, and others.
    """
    point = Point(self.position.x, self.position.y)
    point_buffered = point.buffer(self.radius + self.buffer, 3)
    self._point_shape = point.buffer(self.radius, 3)

    scale = 10.0
    font = truetype(self.fontfile, int(self.fontsize * scale), encoding='unic')

    x, y = self.position.x, self.position.y
    w, h = font.getsize(self.name)
    w, h = w / scale, h / scale

    for placement in placements:
        label_shape = point_label_bounds(x, y, w, h, self.radius, placement)
        mask_shape = label_shape.buffer(self.buffer, 2).union(point_buffered)

        self._label_shapes[placement] = label_shape
        self._mask_shapes[placement] = mask_shape

    unionize = lambda a, b: a.union(b)
    self._label_footprint = reduce(unionize, self._label_shapes.values())
    self._mask_footprint = reduce(unionize, self._mask_shapes.values())

    # number of pixels from the top of the label based on the bottom of a "."
    self._baseline = font.getmask('.').getbbox()[3] / scale
def parse(line):
    import shapefile
    line = line.split(",")
    datetime = line[2].split()
    try:
        # round the minute down to the nearest 10
        time = datetime[1]
        time = time[0:-1] + '0'
        x = float(line[9])
        y = float(line[10])
    except (IndexError, ValueError):
        print("*********************")
        print("Invalid Point, line is:", line)
        return ("Invalid Point", 1)

    sf = shapefile.Reader("../NY_counties_clip/NY_counties_clip")
    shapeRecs = sf.shapeRecords()
    point = Point(x, y)
    county = "Not found"
    for sr in shapeRecs:
        coords = sr.shape.points
        polygon = MultiPoint(coords).convex_hull
        if point.within(polygon):
            county = sr.record[6]
    newLine = (time + "," + county, 1)
    if county == "Not found":
        print("********************")
        print("County not found, point is:", x, y)
    return newLine
def find_valid_edges(vertexes, edges, env, polygons):
    valid_edges = []
    for i, p1 in enumerate(vertexes):
        print(i)
        for p2 in [x for x in vertexes if not x == p1]:
            add = True
            line2 = LineString([p1, p2])
            if env.crosses(line2):
                continue
            xx, yy = line2.xy
            # The midpoint will lie within a shape if the line is composed of
            # two vertices of the polygon
            m = Point(sum(xx) / 2., sum(yy) / 2.)
            if [x for x in polygons if m.within(x)]:
                continue  # skip this edge if it is within a polygon
            for edge in edges:
                line1 = LineString(edge)
                if add and not line1 == line2 and line1.crosses(line2):
                    add = False
                    break
            if add:
                valid_edges.append([p1, p2])
    return valid_edges
def gps_to_locality(lat, lon, basemap_obj, df_polygons, field_int, poly='poly'):
    poi = Point(basemap_obj(lon, lat))
    for i, x in enumerate(df_polygons[poly]):
        if poi.within(x):
            # returns first match
            return df_polygons.iloc[i][field_int]
    return None
def project(p1, p2, p3):
    """Project a Point, p3 onto a line between Points p1 and p2.

    Uses Shapely and GEOS functions, which set distance to zero for all
    negative distances.

    Parameters:
        p1 (Point) : point at zero distance on line between p1 and p2.
        p2 (Point) : endpoint of line.
        p3 (Point) : the point to project.

    Returns:
        result (dict) : the projected Point, distance along line, offset from
        line, and fractional distance along line.
    """
    line = LineString([(p1.x, p1.y), (p2.x, p2.y)])
    u = line.project(p3, normalized=True)
    d = line.project(p3, normalized=False)
    pt_xy = line.interpolate(d)
    pt = Point([pt_xy.x, pt_xy.y, p3.z])

    # calculate the offset distance of p3 from the line
    if (p1.y - p2.y) * (p3.x - p2.x) - (p1.x - p2.x) * (p3.y - p2.y) < 0:
        offset = -pt.distance(p3)  # the point is offset left of the line
    else:
        offset = pt.distance(p3)   # the point is offset right of the line

    result = {'pt': pt, 'd': d, 'o': offset, 'u': u}
    return result
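# Example (added): a usage sketch of project() with simple 3D points; the
# expected values follow directly from the geometry.
from shapely.geometry import Point

res = project(Point(0, 0, 0), Point(10, 0, 0), Point(3, 4, 1))
print(res['d'])  # 3.0 - distance along the line
print(res['u'])  # 0.3 - fractional distance along the line
print(res['o'])  # 4.0 - offset magnitude, signed by the function's convention
print(list(res['pt'].coords))  # [(3.0, 0.0, 1.0)] - projection keeps p3's z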
def fill_polygon_with_points(cls, goal=None, polygon=None):
    """ Fill a shapely polygon with X number of points """
    if goal is None:
        raise ValueError("Must specify the number of points (goal) to fill the polygon with")
    if polygon is None or (not isinstance(polygon, Polygon) and not isinstance(polygon, MultiPolygon)):
        raise ValueError("Must specify a polygon to fill points with")

    minx = polygon.bounds[0]
    maxx = polygon.bounds[2]
    miny = polygon.bounds[1]
    maxy = polygon.bounds[3]

    points = []
    now = time.time()
    while len(points) < goal:
        random_x = random.uniform(minx, maxx)
        random_y = random.uniform(miny, maxy)
        p = Point(random_x, random_y)
        if p.within(polygon):
            points.append(p)
    logger.info("Filling polygon with points took %f seconds" % (time.time() - now))
    return points
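# Example (added): a standalone sketch of the same fill-by-rejection idea on a
# simple box, without the class, logger, and timing plumbing above.
import random
from shapely.geometry import Point, Polygon

poly = Polygon([(0, 0), (4, 0), (4, 2), (0, 2)])
minx, miny, maxx, maxy = poly.bounds
pts = []
while len(pts) < 10:
    p = Point(random.uniform(minx, maxx), random.uniform(miny, maxy))
    if p.within(poly):
        pts.append(p)
print(all(p.within(poly) for p in pts))  # True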
def check(self, latitude, longitude):
    current_point = Point(latitude, longitude)
    cur_ts = datetime.utcnow()
    if current_point.within(self.START_POLY):
        if self.inside_area:
            return False
        else:
            self.inside_area = True
            return self.CHECKPOINT_START
    elif current_point.within(self.SECTOR2_POLY):
        if self.inside_area:
            return False
        else:
            self.inside_area = True
            return self.CHECKPOINT_SECTOR2
    elif current_point.within(self.SECTOR3_POLY):
        if self.inside_area:
            return False
        else:
            self.inside_area = True
            return self.CHECKPOINT_SECTOR3
    else:
        self.inside_area = False
        return False
def batch(num_features):
    # Coordinate values in range [0, 50]
    x = partial(random.uniform, 0.0, 50.0)
    # radii in range [0.01, 0.5]
    r = partial(random.uniform, 0.01, 0.5)

    # Poisson-distributed resolution k
    def k(expectation=1):  # was: partial(random.randint, 1, 4)
        L = math.exp(-expectation)
        k = 0
        p = 1
        while p > L:
            k = k + 1
            u = random.uniform(0.0, 1.0)
            p = p * u
        return k - 1

    batch = {'index': []}
    for i in xrange(num_features):
        point = Point(x(), x())
        polygon = point.buffer(r(), k())
        batch['index'].append(dict(
            id=str(i + 1),
            bbox=polygon.bounds,
            geometry=mapping(polygon),
            properties=dict(title='Feature %d' % (i + 1))))
    return batch
def fuse_point_to_polygon(point, polygon):
    """
    :param point:
    :param polygon:
    """
    p = Point(point)
    min_distance = float("inf")
    min_index = -1
    polygon = polygon[::-1]  # FIXME: the received polygon is in the wrong order; this is a workaround

    # Identify the nearest line segment.
    for i in range(len(polygon)):
        seg = polygon[i:i + 2]
        if len(seg) < 2:
            # close the polygon
            seg = [polygon[-1]] + [polygon[0]]
        line_segment = LineString(seg)
        dist = p.distance(line_segment)
        if dist < min_distance:
            min_distance = dist
            min_index = i

    # Insert the point after the nearest segment.
    fused_polygon = polygon[min_index + 1:] + polygon[:min_index + 1] + [point]
    return fused_polygon
def pymol_select_memb_old(pdb: MyPDB) -> set:
    """
    print a pymol selection line for all residues that are in the membrane
    !!! this assumes that cntr is at 0 0 0 and norm at 15 0 0 !!!
    """
    from shapely.geometry import LineString, Point
    # create Points from center & thickness
    cntr_pnt = Point(pdb.memb_res.cntr.x, pdb.memb_res.cntr.y, pdb.memb_res.cntr.z)
    thkn_m_pnt = Point(-pdb.memb_res.thkn.x, pdb.memb_res.thkn.y, pdb.memb_res.thkn.z)
    thkn_pnt = Point(pdb.memb_res.thkn.x, pdb.memb_res.thkn.y, pdb.memb_res.thkn.z)

    # define the line between center and thickness
    line = LineString([thkn_m_pnt, thkn_pnt])
    thickness = cntr_pnt.distance(thkn_pnt)

    result = set()
    # iterate over all CAs in the pdb
    for cid in sorted(pdb.chains.keys()):
        for rid in sorted(pdb[cid].residues.keys()):
            atom = pdb[cid][rid]['CA']
            # the atom as a Point
            p = Point(atom.xyz.x, atom.xyz.y, atom.xyz.z)
            # projection of the CA atom on the center-thickness line
            np = line.interpolate(line.project(p))
            # if the distance along the center-thickness line is smaller than
            # the membrane half-thickness, this residue is in the membrane
            if cntr_pnt.distance(np) < thickness - 0.1:
                result.add(atom.res_seq_num)
    return result
def game_loop(self):
    while 1:
        for event in pygame.event.get():
            if event.type == QUIT:
                return
            elif event.type == KEYDOWN and event.key == K_ESCAPE:
                return
            elif event.type == MOUSEBUTTONDOWN:
                pt = Point(pygame.mouse.get_pos())
                # this list comprehension gets the clicked region
                pt_match = ([[key for key, val in regns.iteritems()
                              if pt.intersects(Polygon(val))]
                             for regns in self.all_regions])
                if pt_match:
                    # clear out the empty lists so that pt_match
                    # only contains the region string
                    pt_match = [match for match in pt_match if match]
                    try:
                        # send click info to the world object
                        self.world.process_action(pt_match[0][0])
                    except IndexError:
                        pass
        self.screen.blit(self.background, (0, 0))
        pygame.display.flip()
def is_stable(lon, lat):
    """
    Determine if point is located in the US stable tectonic region. Uses the
    same boundary as the US NSHMP and so this function needs to be modified
    to work outside of the US.

    Args:
        lon (float): Longitude.
        lat (float): Latitude.

    Returns:
        bool: Is the point classified as tectonically stable.
    """
    p = Point((lon, lat))
    pfile = pkg_resources.resource_filename('shakelib.utils',
                                            os.path.join('data', 'cratons.geojson'))
    with open(pfile) as f:
        cratons = json.load(f)
    coord_list = cratons['features'][0]['geometry']['coordinates']
    for clist in coord_list:
        poly = Polygon(clist[0])
        if p.within(poly):
            return True
    return False
def Check_Poly(ev_lat,ev_lon): #St_poly = Polygon(((37.80375833333333,-122.26878055555555), # (37.80314722222222,-122.26916388888888), # (37.80622222222222,-122.277425), # (37.80683888888888,-122.277025))) '''St_poly = Polygon(((37.80174722222222,-122.27128888888889), (37.80349444444444,-122.27603055555555), (37.80786944444444,-122.27381666666666), (37.806755555555554,-122.26869166666667), (37.812374999999996,-122.26591111111111), (37.81121388888889,-122.2636), (37.806869444444445,-122.26615555555556), (37.80575833333333,-122.26838055555555)))''' '''poly = Polygon(((37.878160,-122.257492), # university Ave (37.873646,-122.284185), (37.870511,-122.309028), (37.863800,-122.305111), (37.869214,-122.254670)))''' # dt oakland (larger region) poly = Polygon(((37.812594,-122.270164), (37.822968,-122.267433), (37.821722,-122.259872), (37.797118,-122.249193), (37.787229,-122.263842), (37.794675,-122.289563), (37.805828,-122.288179), (37.802708,-122.278026))) point = Point((ev_lat,ev_lon)) in_poly = point.within(poly) return in_poly
def solve(f, R, t, r, g):
    # make the circle
    outerCircle = Point(0, 0).buffer(R, 1 << 12)
    # make the ring
    innerCircle = Point(0, 0).buffer(R - t - f, 1 << 12)
    # make the bars
    bars = []
    leftMin = -r - (2 * r + g) * (int(R / (2 * r + g)) + 1)
    left = leftMin
    while left < R:
        bars.append(box(left, -R, left + 2 * r + 2 * f, R))
        left += 2 * r + g
    bottom = leftMin
    while bottom < R:
        bars.append(box(-R, bottom, R, bottom + 2 * r + 2 * f))
        bottom += 2 * r + g
    # get the union
    union = outerCircle.difference(innerCircle)
    for bar in bars:
        union = union.union(bar)
    # intersection with prev shape
    finalPattern = union.intersection(outerCircle)
    # calc area ratio
    result = finalPattern.area / outerCircle.area
    return '%.6f' % result
def check_geofence(self, x_utm=None, y_utm=None):
    '''
    method to check to see if a point is within the geofence. If x_utm or
    y_utm is not given, this checks the present location.

    x_utm = the x utm to check (m)
    y_utm = the y utm to check (m)

    returns true (yes, within geofence) or false (no, not in geofence)
    '''
    if x_utm is None and y_utm is None:
        x_utm = self.bs['x_utm']
        y_utm = self.bs['y_utm']
        using_present_location = True
    else:
        using_present_location = False

    test_point = Point(x_utm, y_utm)
    in_geofence = test_point.within(self.geofence)

    # update the been_in_geofence flag so it flips high if this is
    # the first call of this method inside of the geofence
    if using_present_location and in_geofence:
        self.been_in_geofence = True
    return in_geofence
def find_edge_nodes(fargs):
    """ Find nodes that are near the edge of the hull """
    cluster, cluster_hull, nodes = fargs
    # There is no hull for this community, it's been deleted.
    if cluster_hull is None:
        log.error("Missing hull, keeping all nodes in cluster %i", cluster)
        return len(nodes), nodes

    characteristic_size = math.sqrt(cluster_hull.area)
    allowed_distance = characteristic_size * args.within
    boundary = cluster_hull.boundary

    output = []
    for node in nodes:
        # check if it is an interior node
        point = Point((node.lon, node.lat))
        keep = False
        if random.random() < args.keep:
            keep = True
        elif point.distance(boundary) < allowed_distance:
            keep = True
        if keep:
            output.append(node)
    return len(nodes), output
def compute_repulsive_ws(self, control_point, obstacle):
    point, _, eta = control_point
    # need to unpack obstacle tuple into polygon and threshold
    obstacle_poly = obstacle[0]
    obstacle_thresh = obstacle[1]
    d2obstacle = point.distance(obstacle_poly)
    # this is straight from book
    if d2obstacle > obstacle_thresh:
        return Point(0, 0)
    else:
        # scalar is length of vector that points away from closest obstacle point
        scalar = eta * ((obstacle_thresh ** -1) - (d2obstacle ** -1)) * (d2obstacle ** -2)
        # construct gradient vector
        # find closest point, you can ignore the details Yinan
        pol_ext = obstacle_poly
        if obstacle_poly.geom_type != "LineString":
            pol_ext = LinearRing(obstacle_poly.exterior.coords)
        d = pol_ext.project(point)
        p = pol_ext.interpolate(d)  # closest point
        c = Point(list(p.coords)[0])
        dqc = c.distance(point)
        # from book, formula for delta vector
        delta_d_i = Point(((point.x - c.x) / dqc, (point.y - c.y) / dqc))
        return Point((-1 * delta_d_i.x * scalar, -1 * delta_d_i.y * scalar))
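# Example (added): the closest-point construction used above, in isolation.
# LinearRing.project/interpolate finds the nearest boundary point of a polygon
# to an outside point.
from shapely.geometry import LinearRing, Point, Polygon

poly = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])
boundary = LinearRing(poly.exterior.coords)
point = Point(3, 1)
closest = boundary.interpolate(boundary.project(point))
print(closest.x, closest.y, point.distance(closest))  # 2.0 1.0 1.0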
def ring(lat, lon, R, r, proj, EC=2.5):
    """Creates a ring defined by two circles with radii r, R centered at x, y

    Args:
        lat, lon: latitude and longitude
        R: outer radius of the ring in m
        r: inner radius of the ring in m
        proj: projection used
        EC: correction parameter
    """
    if R == r:
        return None
    # get projected coordinates
    (x, y) = proj(lon, lat)
    # adjust the radii for projection error
    error_r = EC * projections.proj_error(proj, [lat, lon], r, 0)
    error_R = EC * projections.proj_error(proj, [lat, lon], R, 0)
    r -= math.fabs(error_r)
    R += math.fabs(error_R)
    if R > r:
        outer_circle = Point(x, y).buffer(R)
        inner_circle = Point(x, y).buffer(r)
    else:
        outer_circle = Point(x, y).buffer(r)
        inner_circle = Point(x, y).buffer(R)
    ring = outer_circle.difference(inner_circle)
    return ring
def ring(centerx, centery, radius, width):
    """ a circular ring """
    c_out = Point(centerx, centery).buffer(radius)
    c_in = Point(centerx, centery).buffer(radius - width)
    return c_out.difference(c_in)
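# Example (added): a quick check of the ring above. buffer() approximates the
# circles with polygons, so the area comes out slightly under
# pi * (5**2 - 4**2) ≈ 28.27.
annulus = ring(0.0, 0.0, 5.0, 1.0)
print(annulus.area)  # roughly 28.2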
def inPoly(self, p):
    tmp = self.getCoordinates(p)
    p = Point(tmp[0], tmp[1])
    if p.within(self.valid_poly):
        return True
    return False
def is_on_edge(self, moveable_obj, edge_index):
    edge = self.vertices_of_edges[edge_index]
    radius = moveable_obj.fixtures[0].shape.radius
    center = moveable_obj.position
    circle = Point(center).buffer(radius)
    edge_line = LineString(edge).buffer(1)
    return circle.intersects(edge_line)
def evaluate(self, action, zones, graph):
    scenario = Scenario_Generator(
        self.width,
        self.height,
        self.immobile_objs,
        self.mobile_objs,
        self.manipulatable_obj,
        self.target_obj,
        showRender=False,
    )
    game_objects = scenario.getGameObjects()
    end_position, shape = scenario.evaluate(action)
    radius = shape.radius
    end_position = Point(end_position)
    circular_region_ball = end_position.buffer(radius)
    occupied_zones = []
    for i in xrange(len(zones)):
        if zones[i].intersects(circular_region_ball):
            occupied_zones.append(i)
    if len(occupied_zones) == 0:
        return len(zones)  # set to the maximum length
    min_d = 9999
    for occupied_zone in occupied_zones:
        length = nx.shortest_path_length(graph, source=occupied_zone,
                                         target=self.target_zone)
        if length < min_d:
            min_d = length
    return min_d
def test_operations(self):
    point = Point(0.0, 0.0)

    # General geometry
    self.assertEqual(point.area, 0.0)
    self.assertEqual(point.length, 0.0)
    self.assertAlmostEqual(point.distance(Point(-1.0, -1.0)), 1.4142135623730951)

    # Topology operations

    # Envelope
    self.assertIsInstance(point.envelope, Point)

    # Intersection
    self.assertIsInstance(point.intersection(Point(-1, -1)), GeometryCollection)

    # Buffer
    self.assertIsInstance(point.buffer(10.0), Polygon)
    self.assertIsInstance(point.buffer(10.0, 32), Polygon)

    # Simplify
    p = loads('POLYGON ((120 120, 121 121, 122 122, 220 120, 180 199, '
              '160 200, 140 199, 120 120))')
    expected = loads('POLYGON ((120 120, 140 199, 160 200, 180 199, '
                     '220 120, 120 120))')
    s = p.simplify(10.0, preserve_topology=False)
    self.assertTrue(s.equals_exact(expected, 0.001))

    p = loads('POLYGON ((80 200, 240 200, 240 60, 80 60, 80 200),'
              '(120 120, 220 120, 180 199, 160 200, 140 199, 120 120))')
    expected = loads(
        'POLYGON ((80 200, 240 200, 240 60, 80 60, 80 200),'
        '(120 120, 220 120, 180 199, 160 200, 140 199, 120 120))')
    s = p.simplify(10.0, preserve_topology=True)
    self.assertTrue(s.equals_exact(expected, 0.001))

    # Convex Hull
    self.assertIsInstance(point.convex_hull, Point)

    # Differences
    self.assertIsInstance(point.difference(Point(-1, 1)), Point)
    self.assertIsInstance(point.symmetric_difference(Point(-1, 1)), MultiPoint)

    # Boundary
    self.assertIsInstance(point.boundary, GeometryCollection)

    # Union
    self.assertIsInstance(point.union(Point(-1, 1)), MultiPoint)

    self.assertIsInstance(point.representative_point(), Point)
    self.assertIsInstance(point.centroid, Point)

    # Relate
    self.assertEqual(point.relate(Point(-1, -1)), 'FF0FFF0F2')
def retrieve_zoom_features(loc, zoom, include_geom, layer_names):
    ''' Retrieve all features enclosing a given point location at a zoom level.

        Requests TopoJSON tile from forever.codeforamerica.org spatial index,
        decodes bounding boxes and geometries if necessary, then yields a
        stream of any feature whose geometry covers the requested point.
    '''
    osm = Provider()
    point = Point(loc.lon, loc.lat)
    coord = osm.locationCoordinate(loc).zoomTo(zoom)

    path = '%(zoom)d/%(column)d/%(row)d' % coord.__dict__
    url = census_url + 'by-tile/%s.topojson.gz' % path

    resp = get(url)
    topo = resp.json()

    debug('request took %.3fs from %s in %s' % (resp.elapsed.total_seconds(), url, hex(get_ident())))

    start = time()
    assert topo['type'] == 'Topology'

    bbox_fails, shape_fails = 0, 0

    for layer in topo['objects']:
        if layer_names is not None and layer not in layer_names:
            continue

        if zoom in zoom_layers:
            assert layer in zoom_layers[zoom]
        else:
            raise Exception('Unknown layer %d' % zoom)

        for object in topo['objects'][layer]['geometries']:
            x_, y_, _x, _y = object['bbox']
            obj_box = Polygon([(x_, y_), (x_, _y), (_x, _y), (_x, y_), (x_, y_)])

            if not point.within(obj_box):
                # object failed a simple bounding box check and can be discarded.
                bbox_fails += 1
                continue

            obj_shp = decode(object, topo)

            if not point.within(obj_shp):
                # object failed a point-in-polygon check and can be discarded.
                shape_fails += 1
                continue

            feature = {'type': 'Feature', 'properties': object['properties']}

            if include_geom:
                feature['geometry'] = obj_shp.__geo_interface__

            yield feature

    debug('check took %.3fs in %s with %d bbox fails and %d shape fails' % (time() - start, hex(get_ident()), bbox_fails, shape_fails))
def project_tracks_to_road(tracks, road_segments):
    """
    Compute tracks that fall into each road segment.
        Args:
            - tracks
            - road_segments
        Return:
            - track_on_road: a dictionary, each key is the index of a road
              segment. Each value is a set of indices of the tracks that
              fall onto this road.
    """
    track_on_road = {}
    for seg_idx in np.arange(len(road_segments)):
        track_on_road[seg_idx] = set([])

    simplified_tracks = []
    for track in tracks:
        line = LineString([(pt[0], pt[1]) for pt in track.utm])
        simplified_track = line.simplify(10.0)
        simplified_tracks.append(simplified_track)

    # Compute road segment linestrings
    road_segment_linestrings = []
    for r_seg in road_segments:
        r_start = r_seg.center - r_seg.half_length * r_seg.direction
        r_end = r_seg.center + r_seg.half_length * r_seg.direction
        r_linestring = LineString([r_start, r_end])
        road_segment_linestrings.append(r_linestring)

    for seg_idx in np.arange(len(road_segments)):
        print(seg_idx)
        for track_idx in np.arange(len(tracks)):
            if road_segment_linestrings[seg_idx].distance(simplified_tracks[track_idx]) > 1.2 * road_segments[seg_idx].half_width:
                continue

            track = tracks[track_idx]
            if len(track.utm) <= 1:
                continue

            for utm_idx in np.arange(len(track.utm)):
                utm = track.utm[utm_idx]
                # estimate the local track direction by finite differences
                if utm_idx == 0:
                    direction = np.array([track.utm[utm_idx + 1][0], track.utm[utm_idx + 1][1]]) - \
                                np.array([track.utm[utm_idx][0], track.utm[utm_idx][1]])
                elif utm_idx == len(track.utm) - 1:
                    direction = np.array([track.utm[utm_idx][0], track.utm[utm_idx][1]]) - \
                                np.array([track.utm[utm_idx - 1][0], track.utm[utm_idx - 1][1]])
                else:
                    direction = np.array([track.utm[utm_idx + 1][0], track.utm[utm_idx + 1][1]]) - \
                                np.array([track.utm[utm_idx - 1][0], track.utm[utm_idx - 1][1]])
                direction /= np.linalg.norm(direction)
                if np.dot(direction, road_segments[seg_idx].direction) < np.cos(np.pi / 4.0):
                    continue

                pt = Point(utm[0], utm[1])
                if pt.distance(road_segment_linestrings[seg_idx]) < 1.2 * road_segments[seg_idx].half_width:
                    track_on_road[seg_idx].add(track_idx)
                    break
    return track_on_road
list(frame)
frame_nonRodent = frame[frame['SERVICECODE'] != 'S0311']
frame_nonRodent.shape
frame_nonRodent['LATITUDE'].isnull().sum()

# Removing records with NULL values in lat-long - to get the census block
# piece to run error-free
frame_nonRodent = frame_nonRodent[np.isfinite(frame_nonRodent['LATITUDE'])
                                  & np.isfinite(frame_nonRodent['LONGITUDE'])]
frame_nonRodent.shape

column_names = list(frame_nonRodent.columns.values)
frame_nonRodent['geometry'] = frame_nonRodent.apply(
    lambda row: Point(row['LONGITUDE'], row['LATITUDE']), axis=1)
frame_nonRodent = geopandas.GeoDataFrame(frame_nonRodent, geometry='geometry')
frame_nonRodent.crs = {'init': 'epsg:4326'}

census_blocks = geopandas.GeoDataFrame.from_file(sys.argv[2])
# e.g. 'E:/VM/DOHHackathon/DC DOH Hackathon 2017/data sets/shapefiles and geospatial information/dc_2010_block_shapefiles/tl_2016_11_tabblock10.shp'
census_blocks.crs = {'init': 'epsg:4326'}

# result = geopandas.tools.sjoin(frame_nonRodent, census_blocks[['GEOID10', 'geometry']], how='inner')
result = geopandas.tools.sjoin(frame_nonRodent[['geometry']],
                               census_blocks[['GEOID10', 'geometry']], how='left')
frame_nonRodent['census_block'] = result['GEOID10']
frame_nonRodent = frame_nonRodent[column_names + ['census_block']]
frame_nonRodent.shape
def in_fov(self, state):
    """Return whether the state lies within the field of view."""
    return self.fov.contains(Point(*state))
def determine_intersect( node: Point, node_class: str, l1: bool, l2: bool, first_set: str, second_set: str, first_setpointtree: STRtree, buffer_value: float, ) -> Dict[str, Union[Point, str, Tuple[str, str], bool]]: """ Determine what intersection the node represents. TODO: R0912: Too many branches. """ if node_class == "X": if l1 and l2: # It's an x-node between sets sets = (first_set, second_set) addition = { "node": node, "nodeclass": node_class, "sets": sets, "error": False, } elif l1 and not l2: # It's an x-node inside set 1 raise ValueError( f"Node {node} does not intersect both sets" f" {first_set} and {second_set}\n l1 is {l1} and l2 is {l2}") # sets = (first_set, first_set) # addition = {'node': node, 'nodeclass': c, 'sets': sets} elif not l1 and l2: # It's an x-node inside set 2 raise ValueError( f"Node {node} does not intersect both sets" f" {first_set} and {second_set}\n l1 is {l1} and l2 is {l2}") # sets = (second_set, second_set) # addition = {'node': node, 'nodeclass': c, 'sets': sets} else: raise ValueError( f"Node {node} does not intersect both sets" f" {first_set} and {second_set}\n l1 is {l1} and l2 is {l2}") # ALL Y NODE RELATIONS elif node_class == "Y": if (l1 is True) and (l2 is True): # It's an y-node between sets # p1 == length of list of nodes from first_set traces # that intersect with X- or Y-node p1 = len(first_setpointtree.query(node.buffer(buffer_value))) if p1 != 0: # set 1 ends in set 2 sets = (first_set, second_set) else: # set 2 ends in set 1 sets = (second_set, first_set) addition = { "node": node, "nodeclass": node_class, "sets": sets, "error": False, } elif (l1 is True) and (l2 is False): # It's a y-node inside set 1 raise ValueError( f"Node {node} does not intersect both sets" f" {first_set} and {second_set}\n l1 is {l1} and l2 is {l2}") # sets = (first_set, first_set) # addition = {'node': node, 'nodeclass': c, 'sets': sets} elif (l1 is False) and (l2 is True): # It's a y-node inside set 2 raise ValueError( f"Node {node} does not intersect both sets" f" {first_set} and {second_set}\n l1 is {l1} and l2 is {l2}") # sets = (second_set, second_set) # addition = {'node': node, 'nodeclass': c, 'sets': sets} else: raise ValueError( f"Node {node} does not intersect both sets" f" {first_set} and {second_set}\n l1 is {l1} and l2 is {l2}") else: raise ValueError(f"Node {node} neither X or Y") return addition
Permit['MERGEADDR'] = Permit['FULLADDR_P'].str.replace(' ', '') Address['MERGEADDR'] = Address['FULLADDR'].str.replace(' ', '') Address_Permit = Address.merge(Permit, how='inner', left_on='MERGEADDR', right_on='MERGEADDR') Address_Permit = Address_Permit.loc[:, [ 'PI_MUNICIPAL_ADDRESS_ID', 'FULLADDR', 'ISSUED DATE', 'APPL TYPE', 'BLG TYPE ', 'CONTRACTOR ', 'DESCRIPTION', 'VALUE', 'FT2', 'Sq Ft', 'lat', 'lon' ]] Address_Permit['ISSUED DATE'] = Address_Permit['ISSUED DATE'].dt.strftime( '%m/%Y') Address_Permit['ISSUED DATE'] = Address_Permit['ISSUED DATE'].astype(str) geometry = [ Point(xy) for xy in zip(Address_Permit['lon'], Address_Permit['lat']) ] Address_Permit = gp.GeoDataFrame(Address_Permit, geometry=geometry) # Address_Permit.crs = {'init' :'epsg:4326'} Address_Permit.to_file('ottPermits.geojson', driver='GeoJSON', encoding="utf-8") #UNCOMMENT BELOW IF YOU WANT TO UPLOAD to MAPBOX OTHERWISE HOST ON GITHUB # u = mb.Uploader(access_token=sk) # url = u.stage(open('ottPermits.geojson', 'rb')) # job = u.create(url, 'ottawaPermits').json()
def _make_context_entries(file_paths):
    '''
    Iterates the metadata, makes an object mirroring a sample_context tuple
    and returns it.
    TODO: Allow for automated 0 values when a field is missing.
    '''
    logger.info('loading edna contextual metadata from .tsv files')
    # site_id declared here so we can go over multiple files at once.
    site_id = 0
    # loading in soil multipoly so we don't have to redo it multiple times
    # since it's >100mb
    soil_shapefile = fiona.open("edna/soil_classification_data/fsl-new-zealand-soil-classification.shp")
    logger.info(soil_shapefile.schema)
    # cache results, since soil classification takes a while
    soil_class_lookup = {}
    for fname in file_paths:
        with open(fname, "r") as file:
            logger.info(fname)
            # with open(fname, "r", encoding='utf-8-sig') as file:
            reader = csv.DictReader(file, delimiter=',')
            for file_row in reader:
                attrs = {}
                site_lookup[site_hash(file_row['Sample_identifier'].upper())] = site_id
                # testing that it won't grab two site id entries instead of overwriting an existing one
                attrs['id'] = site_id
                for edna_ontology_item in DataImporter.edna_sample_ontologies:
                    # if it's an ontology field just add '_id' to the end of the name
                    if edna_ontology_item not in file_row:
                        continue
                    attrs[_clean_field(edna_ontology_item) + '_id'] = mappings[edna_ontology_item][file_row[edna_ontology_item]]
                for edna_ontology_item, value in file_row.items():
                    cleaned_field = _clean_field(edna_ontology_item)
                    if cleaned_field in attrs or (cleaned_field + '_id') in attrs:
                        continue
                    attrs[cleaned_field] = _clean_value(value)
                    if _clean_value(value) == '' or _clean_value(value) == ' ':
                        attrs[cleaned_field] = 0
                site_id += 1
                # adding soil classification
                # attr_point = shapely.geometry.shape({
                #     'type': 'Point',
                #     'coordinates': (float(attrs['longitude']), float(attrs['latitude']))
                # })
                attr_point = Point(float(attrs['longitude']), float(attrs['latitude']))
                attr_key = attrs['longitude'] + "," + attrs['latitude']
                if attr_key in soil_class_lookup:
                    logger.info("soil data exists, using lookup value")
                    attrs['soil_type'] = soil_class_lookup[attr_key]
                else:
                    logger.info(attr_point)
                    soil_class = None
                    for feature in soil_shapefile:
                        if attr_point.within(shape(feature['geometry'])):
                            soil_class = feature['properties']['nzsc_class']
                            soil_class_lookup[attr_key] = feature['properties']['nzsc_class']
                    logger.info(soil_class)
                    attrs['soil_type'] = soil_class
                    if soil_class is None:
                        soil_class_lookup[attr_key] = None
                yield SampleContext(**attrs)
def get_projections(lines, cluster_centre, unique_labels, times, matrix): """ Algorithm : For each point -> find lines originating from cluster centres, project on the line where the distance is shortest """ projections = [] for i in range(len(matrix)): #print matrix[i] #print times[i] index = np.where(unique_labels == times[i])[0] #Cluster Centre for the current point centre = (cluster_centre[index[0]][0], cluster_centre[index[0]][1]) #Extract all the lines originating from the cluster centre temp_lines = [] for line in lines: if centre in line: temp_lines.append(line) distances = [] for line in temp_lines: #Get the line from the cluster centre l = LineString(line) #Define Point q = Point(matrix[i][0], matrix[i][1]) distances.append(q.distance(l)) #Obtain the Minimum Distance and the line corresponding to it --> Then project the point onto the line min_distance_index = distances.index(min(distances)) #Line on which the projection has to take place projection_line = temp_lines[min_distance_index] p_l = LineString(projection_line) ns = p_l.interpolate(p_l.project(Point(matrix[i][0], matrix[i][1]))) projections.append(np.array(ns)) projections = np.array(projections) print lines X = projections[:, 0] Y = projections[:, 1] plt.scatter(X, Y, c=times, s=130, alpha=0.4) plt.show() #plt.clf() #When doing in an unsupervised way """ Projection of branches with the starting point mentioned """ """ #Known Biological Knowledge starting_point = (cluster_centre[0][0],cluster_centre[0][1]) temp_point = (cluster_centre[1][0],cluster_centre[1][1]) #Get the line for the starting point for line in lines: if starting_point in line: first_line = line if temp_point in line: second_line = line #Construct Line String l_s = LineString(first_line) #Get the Line with has to be projected projection_line = LineString(second_line) new_projections = [] old_projections = [] i = 0 temp = [] for point in projections: temp_point = Point((point[0],point[1])) if projection_line.distance(temp_point) < 0.00000000001 : #Project point onto first line a = np.array(l_s.interpolate(l_s.project(temp_point))) new_projections.append(a) temp.append(times[i]) else: new_projections.append(point) i += 1 new_projections = np.array(new_projections) old_projections = np.array(old_projections) x = new_projections[:,0] y = new_projections[:,1] #plt.cla() plt.scatter(x,y,c=times, s=130,alpha=0.4) plt.show() #plt.scatter(old_projections[:,0],old_projections[:,1],s=130,alpha=0.4) """ return
def pg_conn():
    conn = psycopg2.connect(conf.connection_str)
    return conn

conn = pg_conn()
curs = conn.cursor()
sqlstr = "SELECT distinct(geocoded_address), geocoded_lng, geocoded_lat from tickets"
curs.execute(sqlstr)
geocoded_addrs = curs.fetchall()

potential_grid_addrs = dict([(addr, []) for addr, _, __ in geocoded_addrs])
addr_points = dict([(addr, Point(lng, lat)) for addr, lng, lat in geocoded_addrs])

print("Finding close grid ids")
for grid_id, grid_centroid in grid_centroids.items():
    grid_lng, grid_lat = grid_centroid
    for address, geo_lng, geo_lat in geocoded_addrs:
        lng_diff = abs(grid_lng - geo_lng)
        lat_diff = abs(grid_lat - geo_lat)
        if lng_diff < .004 and lat_diff < .004:
            potential_grid_addrs[address].append(grid_id)

print("Finding addrs in grids")
def geo_one_point_plot(lng, lat, base):
    # plot a single point
    point1 = Point(lng, lat)
    point_df = gpd.GeoDataFrame(geometry=[point1])
    point_df.plot(ax=base, color='black', alpha=0.5)
def test_equal_comp_op(self):
    s = GeoSeries([Point(x, x) for x in range(3)])
    res = s == Point(1, 1)
    exp = pd.Series([False, True, False])
    assert_series_equal(res, exp)
def is_within(self, lon, lat, radius):
    return self.location.intersects(Point(lon, lat).buffer(radius))
def on_touch_up_from_scatter(self, tx: int, ty: int): """ Ran by the scatter when user is not dragging """ # noinspection PyTypeChecker to_select: BuildingBase = None point = Point(tx, ty) building: BuildingBase for building in self.buildings: # I want my long time and effort to be remembered, this to so long, AND THE ANSWER WAS SO SIMPLE OMG """bPos = building._obj.pos[0], building._obj.pos[1], building._obj.pos[2] bPos2 = building._obj.pos[0] + self.renderer.width, building._obj.pos[1] + self.renderer.height, building._obj.pos[2] cPos = self.camera.pos""" """bVPos = Vector3(bPos) bVPos2 = Vector3(bPos2) x, y, z = bVPos + Vector3(0, 0, 100) x2, y2, z2 = bVPos2 + Vector3(0, 0, 100) print(x, y, z, x2, y2, z2)""" """pitch = atan((bPos[0] - cPos.x) / (bPos[1] - cPos.y)) yaw = atan((bPos[2] - cPos.z) / (bPos[1] - cPos.y)) x = width() / 2 + (pitch * (width() / self.camera.fov)) y = height() / 2 + (yaw * (height() / self.camera.fov)) pitch2 = atan((bPos2[0] - cPos.x) / (bPos2[1] - cPos.y)) yaw2 = atan((bPos2[2] - cPos.z) / (bPos2[1] - cPos.y)) x2 = width() / 2 + (pitch2 * (width() / self.camera.fov)) y2 = height() / 2 + (yaw2 * (height() / self.camera.fov)) print(touch.x, touch.y) print(bPos, bPos2, cPos) print(x, y, x2, y2) print(self.camera.rotation)""" """print(building) print(building._obj.pos, building._obj.scale) print(building._obj._instructions, building._obj._instructions.children) print() print() print(building._obj._translate.matrix) print() print() print(building._obj._scale.matrix) print() print() print(building._obj._rotors["x"].matrix) print() print(building._obj._rotors["y"].matrix) print() print(building._obj._rotors["z"].matrix)""" """print(building._obj.pos, (building._obj.pos[0] - (width() / 2), building._obj.pos[1] - (height() / 2)), building._obj.scale.xyz) print(touch.pos) print(touch.pos[0] - (width() / 2), touch.pos[1] - (height() / 2)) print(self.renderer.size) print(self.scatter_widget.scale) print(Vector3.get_XY_from_camera(building._obj.pos, self.camera)) print() print() print() print() print(Matrix()) print(Matrix().project(building._obj.pos[0], building._obj.pos[1], building._obj.pos[2], Matrix(), Matrix(), self.camera.pos.x, self.camera.pos.y, width(), height()))""" polygon = Polygon(building.get_projected_tl_tr_br_bl()) if polygon.contains(point): to_select = building break if to_select is None: self.log_deep_debug("User touched but no building was clicked") get_screen("BaseBuildScreen").ids["building_buttons_handler"].clear_buttons() else: buildings = self.buildings.copy() buildings.remove(to_select) for building in buildings: building.selected = False to_select.selected = True self.log_debug("Building", to_select, "was clicked on, setting building to selected")
state.crs

# %%
fig, ax = plt.subplots(figsize=(5, 5))
state.plot(ax=ax)
plt.show()

# %%
# Add some points
# Phoenix: 33.448, -112.074
# Tucson: 32.2226, -110.9747
cities_list = np.array([[-112.074, 33.448], [-110.9747, 32.2226]])

# %%
# make these into spatial features
cities_geom = [Point(xy) for xy in cities_list]
cities_geom

# %%
# make a dataframe of these points
cities_df = gpd.GeoDataFrame(cities_geom, columns=['geometry'], crs=state.crs)

# %%
# /////////////////
# Plotting

# Check crs alignment for layers
print("Gages_AZ crs:", gages_AZ.crs, "\n")
print("Watershed crs:", wshed.crs, "\n")
print("State Boundary crs:", state.crs, "\n")
print("Cities crs =", cities_df.crs, "\n")
c1 = m1 * x1 - y1

# polygon 2: W 115 St / W 125 St
array2 = [(40.817984, -73.960389), (40.808272, -73.966537),
          (40.805422, -73.959864), (40.811041, -73.954274)]
poly2 = Polygon(array2)

with open('data.csv') as s:
    reader = csv.DictReader(s)
    for row in reader:
        # original latitude and longitude values are strings
        latitude = row['latitude']
        longitude = row['longitude']
        latitude_scaled = float(latitude)
        longitude_scaled = float(longitude)
        pt = Point(latitude_scaled, longitude_scaled)
        if poly1.contains(pt):
            perpendicular_gradient = -1 / m1
            # eqn of line
            a = -perpendicular_gradient
            b = 1.0
            c = perpendicular_gradient * latitude_scaled - longitude_scaled
            # find intersection point
            x = (c1 - c) / (-a1 + a)
            y = -a1 * x - c1
            # perpendicular distance by Pythagoras' theorem
            distance = math.sqrt((y - longitude_scaled)**2 + (x - latitude_scaled)**2)
            price = row['price']
            # print latitude + ", " + longitude + ", " + str(x) + ", " + str(y) + ", " + str(price) + ", " + str(distance*100000)
            # print str(price) + ", " + str(distance*100000)
xy_out = np.divide(xy, 10.64)
xy_out = Rotate2D(np.fliplr(xy_out), (0, 0), np.deg2rad(32.0053832077))
xy_out = np.add(xy_out, (417944.79, 443361.75))
np.savetxt('xy.csv', xy_out, delimiter=',', header='x,y', comments='')

schema = {
    'geometry': 'Point',
    'properties': {
        'id': 'int'
    },
}

i = 0
with fiona.open('xy.shp', 'w', 'ESRI Shapefile', schema) as c:
    for row in xy_out:
        point = Point(row)
        c.write({
            'geometry': mapping(point),
            'properties': {
                'id': i
            },
        })
        i += 1

plt.savefig('Points.png', bbox_inches='tight')
plt.autoscale(False)
plt.plot(xy[:, 1], xy[:, 0], 'bo')
plt.savefig('result.png', bbox_inches='tight')
plt.show(block=True)
plt.close()
sid = SentimentIntensityAnalyzer()
late_sleep_data = query.query_view('db_twitters', 'stayuplateinworkday')
all_day_data = query.query_view('db_twitters', 'twitterinworkday')
print(len(late_sleep_data))
print(len(all_day_data))

i = 0
for tweet in late_sleep_data:
    # print('count:', i)
    i += 1
    coordinates = tweet['value']['coordinates']
    point = Point(coordinates[0], coordinates[1])
    loop_flag = True
    for suburb in suburbs.keys():
        for key, value in suburbs[suburb].items():
            if key == 'poly':
                poly = value
                if poly.contains(point):
                    suburbs[suburb]['late_sleep_count'] += 1
                    loop_flag = False
                    break
        if loop_flag == False:
            break

j = 0
for tweet in all_day_data[:2000]:
    print('count:', j)
r = requests.get('https://api-v3.mbta.com/stops?filter[route_type]=1',
                 headers={'api_key': '17635449db9d4d41a6cd765ad49ab78e'})

stops = OrderedDict()
for item in r.json()['data']:
    description = item['attributes']['description']
    name = description.split('-')[0]
    line = description.split('-')[1]
    if name not in stops.keys():
        entry = {}
        entry['line'] = line
        entry['lat'] = item['attributes']['latitude']
        entry['lon'] = item['attributes']['longitude']
        entry['geometry'] = Point(entry['lon'], entry['lat'])
        stops[name] = entry

df = pd.DataFrame.from_dict(stops, orient='index')

street_map = gpd.read_file('shapefile/bos_land.shp')
street_map = street_map.to_crs({'init': 'epsg:4326'})

fig, ax = plt.subplots(figsize=(15, 15))
street_map.plot(ax=ax, alpha=0.4, color="grey")

crs = {'init': 'epsg:4326'}
geometry = [row['geometry'] for idx, row in df.iterrows()]
geo_df = gpd.GeoDataFrame(df, crs=crs, geometry=geometry)
geo_df[geo_df['line'].str.contains('Red Line') == True].plot(ax=ax,
def test_split_closed_line_with_point(self):
    # point at start/end of closed ring -> return equal
    # see GH #524
    ls = LineString([(0, 0), (0, 1), (1, 1), (1, 0), (0, 0)])
    splitter = Point(0, 0)
    self.helper(ls, splitter, 1)
def test_polygon_contain_a_point(self):
    polygons = self.read_test_polygons()
    point = (5, 5)
    self.assertTrue(polygons[0].contains(Point(point)))
    point = (9, 9.1)
    self.assertFalse(polygons[0].contains(Point(point)))
print("Loading saved part 2 graph") H = nx.read_gpickle(Hexport2) else: from shapely.geometry import Point, LineString from scipy.spatial import distance def closest_node(node, nodes): closest_index = distance.cdist([node], nodes).argmin() return nodes[closest_index] print("Adding edges to H between ward center and nearest road node") l = range(len(shape_roads)) #Did this programatically count = 0 for node in ALL_coord_pair_wc: count += 1 point = Point(node) cutting_list = [] longest_distance = 10000000000000000000000000 # ~ = infinity c = 0 #c will be the index of road segment selected #Parse list of road segments and update until closest one selected (c = index) for a in l: road_seg_a = LIST_ROAD_SEG[a] line_a = LineString(road_seg_a) dist_to_road = point.distance( line_a) #how far is the road from this wc? if dist_to_road < longest_distance: longest_distance = dist_to_road c = a #this gives us index number of the road to access for adding point #Get linestring for the closest road segment
from shapely.geometry import Point, Polygon
from datetime import datetime
from geopy.distance import geodesic
import datetime

# read data and the Victoria region shapefile in python
station_info = pd.read_table("rainfall_data\HQMR_stations.txt", sep="\s+", header=None)
victoria_map = gpd.read_file(
    "E:\\Study\\20 semester 1\\Project\\bushfire\\victoria\\VIC_STATE_POLYGON_shp"
)
rainfall_station_dic = {}
# for each monitoring station, check whether the station point lies within the Victoria region
for i in range(0, len(station_info)):
    # convert latitude and longitude into a point
    point = Point(station_info.loc[i, 2], station_info.loc[i, 1])
    # keep the station if its point falls within the region
    records = victoria_map.geometry.contains(point)
    if pd.Series(records).any():
        rainfall_station_dic[station_info.loc[i, 0]] = [
            station_info.loc[i, 1], station_info.loc[i, 2]
        ]

# create attraction coordinates according to the position
place_latlng = {
    "-37.999183, 147.640562": "Gippsland lakes",
    "-37.863799, 144.973203": "St kilda",
    "-37.916325, 144.986495": "Brighton Beach",
    "-37.210955, 142.397560": "Grampians",
    "-37.834896, 145.347100": "Dandenong ranges",
    "-37.810858, 144.965682": "Main Melbourne",
    "-37.233926, 146.436825": "Mount Buller",
async def process_source(filename): logging.info(f"Processing {filename}") headers = { "User-Agent": "Mozilla/5.0 (compatible; MSIE 6.0; ELI WMS sync )" } timeout = aiohttp.ClientTimeout(total=10) conn = aiohttp.TCPConnector(limit_per_host=2) async with ClientSession(headers=headers, timeout=timeout, connector=conn) as session: out_image = os.path.join( outdir, os.path.basename(filename).replace(".geojson", ".png")) if os.path.exists(out_image): return async with aiofiles.open(filename, mode="r", encoding="utf-8") as f: contents = await f.read() source = json.loads(contents) # Skip non tms layers if not source["properties"]["type"] in {"tms", "wms"}: return if "geometry" in source and source["geometry"] is not None: geom = shape(source["geometry"]) centroid = geom.representative_point() else: centroid = Point(0, 0) async def test_zoom(zoom): tile = mercantile.tile(centroid.x, centroid.y, zoom) if source["properties"]["type"] == "tms": url = await get_tms_image(tile, source, session) elif source["properties"]["type"] == "wms": url = await get_wms_image(tile, source, session) if url is None: return None, None, None try: status, img = await get_image(session, url) if status == ImageStatus.SUCCESS: image_hash = imagehash.average_hash(img) pal_image = Image.new("P", (1, 1)) pal_image.putpalette((0, 0, 0, 0, 255, 0, 255, 0, 0, 255, 255, 0) + (0, 0, 0) * 252) img_comp = img.convert("RGB").quantize(palette=pal_image) colors = img_comp.getcolors(1000) max_pixel_count = max([count for count, color in colors]) return image_hash, img, max_pixel_count except Exception as e: logging.error(e) return None, None, None image_hashes = {} max_pixel_counts = {} images = {} for zoom in range(20): image_hash, img, max_pixel_count = await test_zoom(zoom) images[zoom] = img image_hashes[zoom] = image_hash max_pixel_counts[zoom] = max_pixel_count # Getting images was not sucessful, nothing to do if len([zoom for zoom in range(20) if images[zoom] is None]) == len(range(20)): return def compare_neighbors(zoom): same_as_a_neighbor = False this_hash = image_hashes[zoom] if zoom - 1 >= 0: left_hash = image_hashes[zoom - 1] if left_hash == this_hash: same_as_a_neighbor = True if zoom + 1 < 20: right_hash = image_hashes[zoom + 1] if right_hash == this_hash: same_as_a_neighbor = True return same_as_a_neighbor def zoom_in_is_empty(zoom): if zoom + 1 < 20: if (image_hashes[zoom + 1] is None or max_count( str(image_hashes[zoom + 1]).upper().replace("F", "O")) == 16): return True return False # Find minzoom min_zoom = None for zoom in range(20): if image_hashes[zoom] is None: continue if zoom_in_is_empty(zoom): continue if max_count(str(image_hashes[zoom]).upper().replace("F", "O")) == 16: continue if not compare_neighbors(zoom): min_zoom = zoom break fig, axs = plt.subplots(2, 10, figsize=(15, 5)) for z in range(20): if z < 10: ax = axs[0][z] else: ax = axs[1][z - 10] ax.set_xlim(0, 256) ax.set_ylim(0, 256) if images[z] is not None: ax.imshow(images[z]) else: ax.text( 0.5, 0.5, "No data", horizontalalignment="center", verticalalignment="center", transform=ax.transAxes, ) ax.set_aspect("equal") # ax.tick_params(axis='both', which='both', length=0.0, width=0.0) ax.get_xaxis().set_ticks([]) ax.get_yaxis().set_ticks([]) if image_hashes[z] is None: ax.set_xlabel("") else: ax.set_xlabel( str(image_hashes[z]) + "\n" + str(max_pixel_counts[z] - 256 * 256)) ax.set_ylabel(z) title = "Zoom: {}".format(z) if z == min_zoom: title += " <== " if ("min_zoom" not in source["properties"] and z == 0) or ("min_zoom" in source["properties"] and 
source["properties"]["min_zoom"] == z): title += " ELI " ax.set_title(title) if "attribution" in source["properties"] and "text" in source[ "properties"]["attribution"]: plt.figtext(0.01, 0.01, source["properties"]["attribution"]["text"]) def update_source(selected_min_zoom, source, filename): # Check against source if we found at least one image if selected_min_zoom is not None: original_min_zoom = 0 if "min_zoom" in source["properties"]: original_min_zoom = source["properties"]["min_zoom"] # Do nothing if existing value is same as tested value if (selected_min_zoom is None or selected_min_zoom == 0) and "min_zoom" not in source["properties"]: return if not selected_min_zoom == original_min_zoom: logging.info("Update {}: {}, previously: {}".format( source["properties"]["name"], selected_min_zoom, original_min_zoom, )) if selected_min_zoom is None or selected_min_zoom == 0: source["properties"].pop("min_zoom", None) else: source["properties"]["min_zoom"] = selected_min_zoom with open(filename, "w", encoding="utf-8") as out: json.dump(source, out, indent=4, sort_keys=False, ensure_ascii=False) out.write("\n") def on_click(event): try: selected_min_zoom = int( event.inaxes.yaxis.get_label().get_text()) update_source(selected_min_zoom, source, filename) if selected_min_zoom < 10: ax = axs[0][selected_min_zoom] else: ax = axs[1][selected_min_zoom - 10] for sp in ax.spines.values(): sp.set_color("red") plt.savefig(out_image) plt.close() except Exception as e: print(str(e)) def on_key(event): selected_min_zoom = min_zoom update_source(selected_min_zoom, source, filename) if selected_min_zoom < 10: ax = axs[0][selected_min_zoom] else: ax = axs[1][selected_min_zoom - 10] for sp in ax.spines.values(): sp.set_color("red") plt.savefig(out_image) plt.close() fig.suptitle(filename) plt.tight_layout() fig.canvas.mpl_connect("button_press_event", on_click) fig.canvas.mpl_connect("key_press_event", on_key) plt.show() try: plt.close() except Exception as e: logging.warning(str(e)) return
def data_manipulation_sh(network): from shapely.geometry import Point, LineString, MultiLineString from geoalchemy2.shape import from_shape, to_shape #add connection from Luebeck to Siems new_bus = str(int(network.buses.index.max()) + 1) new_trafo = str(int(network.transformers.index.max()) + 1) new_line = str(int(network.lines.index.max()) + 1) network.add("Bus", new_bus, carrier='AC', v_nom=220, x=10.760835, y=53.909745) network.add("Transformer", new_trafo, bus0="25536", bus1=new_bus, x=1.29960, tap_ratio=1, s_nom=1600) network.add("Line", new_line, bus0="26387", bus1=new_bus, x=0.0001, s_nom=1600) network.lines.loc[new_line, 'cables'] = 3.0 #bus geom point_bus1 = Point(10.760835, 53.909745) network.buses.set_value(new_bus, 'geom', from_shape(point_bus1, 4326)) #line geom/topo network.lines.set_value( new_line, 'geom', from_shape( MultiLineString([ LineString([to_shape(network.buses.geom['26387']), point_bus1]) ]), 4326)) network.lines.set_value( new_line, 'topo', from_shape( LineString([to_shape(network.buses.geom['26387']), point_bus1]), 4326)) #trafo geom/topo network.transformers.set_value( new_trafo, 'geom', from_shape( MultiLineString([ LineString([to_shape(network.buses.geom['25536']), point_bus1]) ]), 4326)) network.transformers.set_value( new_trafo, 'topo', from_shape( LineString([to_shape(network.buses.geom['25536']), point_bus1]), 4326)) return
def get_distance_from_road_center(self, x, y):
    line = LineString([(self.start_node.X, self.start_node.Y),
                       (self.end_node.X, self.end_node.Y)])
    point = Point(x, y)
    distance = point.distance(line)
    return distance
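# Example (added): the underlying shapely call, standalone. Point.distance to
# a LineString is the perpendicular distance, or the distance to the nearest
# endpoint when the projection falls off the segment.
from shapely.geometry import LineString, Point

road = LineString([(0, 0), (10, 0)])
print(Point(5, 3).distance(road))   # 3.0 - perpendicular distance
print(Point(13, 4).distance(road))  # 5.0 - nearest endpoint is (10, 0)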
def circle(center, radius=1):
    assert len(center) in [2, 3], \
        'Center of circle must have 2 or 3 elements'
    assert radius > 0, 'Radius must be greater than zero'
    center = Point(*center)
    return center.buffer(radius)
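# Example (added): buffer() returns a polygonal approximation of the disc, so
# the area is slightly below pi * r**2.
c = circle((0, 0), 2)
print(c.area)  # about 12.55, just under pi * 4 ≈ 12.566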
def pt_in_poly(poly, pt):
    return poly.contains(Point(pt))
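# Example (added): usage sketch of pt_in_poly with a unit square; note that
# contains() is False for points exactly on the boundary.
from shapely.geometry import Polygon

square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
print(pt_in_poly(square, (0.5, 0.5)))  # True
print(pt_in_poly(square, (1.0, 0.5)))  # False - boundary points are not contained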
def create_network(buildings, gen_lat, gen_lng):
    """
    Create a network of lines and nodes from the buildings file, using a
    minimum spanning tree to generate the connecting lines between the
    buildings.

    Parameters
    ----------
    buildings: geopandas.GeoDataFrame
        All of the buildings with attributes and geometries.
    gen_lat: float
        Latitude of PV generator.
    gen_lng: float
        Longitude of PV generator.

    Returns
    -------
    network: list of lists
        Each list within the list contains a single network arc, with the
        following attributes:
            0   index
            1   xs
            2   ys
            3   xe
            4   ye
            5   node index first point
            6   node index last point
            7   whether this arc is directed (0 or 1)
            8   arc length
            9   whether enabled (default to 1)
    nodes: list of list
        Each list within contains a single building node, with the PV point
        at index 0. Each element has the following attributes:
            0   index
            1   x
            2   y
            3   area_m2
            4   marginal distance
            5   total distance
            6   connected (default to 0)
            7.. connected arc indices
    """
    gen_lat = float(gen_lat)
    gen_lng = float(gen_lng)

    buildings_projected = buildings.to_crs(EPSG102022)

    buildings_points = buildings_projected.copy()
    buildings_points.geometry = buildings_points['geometry'].centroid
    buildings_points['X'] = buildings_points.geometry.x
    buildings_points['Y'] = buildings_points.geometry.y

    # We then take all the houses and calculate the optimum network that
    # connects them all to the PV point, before we start analysing further
    # and deciding on the optimum network.
    df = pd.DataFrame(buildings_points)

    pv_point = gpd.GeoDataFrame(crs={'init': 'epsg:4326'},
                                geometry=[Point([gen_lng, gen_lat])])
    pv_point_projected = pv_point.copy()
    pv_point_projected = pv_point_projected.to_crs(EPSG102022)
    pv_point_df = [{
        'X': pv_point_projected.geometry.x,
        'Y': pv_point_projected.geometry.y,
        'area': 0
    }]
    df = pd.concat([pd.DataFrame(pv_point_df), df], ignore_index=True)
    points = df[['X', 'Y']].as_matrix()

    T_x, T_y = get_spanning_tree(points)

    # This point and line data is then copied into two arrays, called *nodes*
    # and *network*, containing the houses and lines, respectively. Each
    # element represents a single house or joining arc, and has data within
    # describing the coordinates and more.
    # astype(int) doesn't round - it just chops off the decimals
    nodes_list = df[['X', 'Y', 'area']].reset_index().values.astype(int).tolist()

    nodes = []
    for n in nodes_list:
        nodes.append({
            'i': n[0],
            'x': n[1],
            'y': n[2],
            'area': n[3],
            'marg_dist': 0,
            'tot_dist': 0,
            'conn': 0,
            'arcs': []
        })

    counter = 0
    network = []
    for xs, ys, xe, ye in zip(T_x[0], T_y[0], T_x[1], T_y[1]):
        network.append({
            'i': counter,
            'xs': int(xs),
            'ys': int(ys),
            'xe': int(xe),
            'ye': int(ye),
            'ns': -99,
            'ne': -99,
            'dir': 0,
            'len': 0,
            'enabled': 1
        })
        counter += 1

    # add the length for each arc
    for arc in network:
        arc['len'] = sqrt((arc['xe'] - arc['xs'])**2 + (arc['ye'] - arc['ys'])**2)

    network, nodes = direct_network(network, nodes, 0)

    # for every node, add references to every arc that connects to it
    for arc in network:
        nodes[arc['ns']]['arcs'].append(arc['i'])
        nodes[arc['ne']]['arcs'].append(arc['i'])

    return network, nodes
ax.set_title("HUC Boundaries") plt.show() HUC6.crs # %% # Add some points # UA: 32.22877495, -110.97688412 # STream gauge: 34.44833333, -111.7891667 point_list = np.array([[-110.97688412, 32.22877495], [-111.7891667, 34.44833333]]) #make these into spatial features point_geom = [Point(xy) for xy in point_list] point_geom #mape a dataframe of these points point_df = gpd.GeoDataFrame(point_geom, columns=['geometry'], crs=HUC6.crs) # plot these on the first dataset #Then we can plot just one layer at atime fig, ax = plt.subplots(figsize=(5, 5)) HUC6.plot(ax=ax) point_df.plot(ax=ax, color='red', marker='*') ax.set_title("HUC Boundaries") plt.show()
meta.update(compress='lzw')

lons, lats = coordinates(template_fn, False)  # test change
lats = np.ma.masked_where(template_rst.read(1) < -3.39999995e+34, lats)

for month, days in julian_months_grouped.indices.iteritems():
    month = str(month)
    if len(month) == 1:
        month = '0' + month

    month_mean = np.dstack([calcRa(lats, day) for day in days + 1]).mean(axis=2)

    # [TEST]: it may be necessary to take this array and its coords in latlong
    # and reproject it to 3338
    lons = np.ma.masked_where(template_rst.read(1) < -3.39999995e+34, lons)
    pts = [
        Point(lalo)
        for lalo in zip(lons.ravel().tolist(), lats.ravel().tolist())
    ]
    mm = month_mean.ravel().tolist()
    df = pd.DataFrame({'Ra': mm, 'geometry': pts})
    # remove masking
    gdf = gpd.GeoDataFrame(df)
    break
    # then rasterize this to the extent of the template_rst
    # [END TEST]

    output_filename = os.path.join(
        output_path, 'ra_mean_allen1998_' + month + 'netest_akcan.tif')
    with rasterio.open(output_filename, 'w', **meta) as out:
        out.write(month_mean.astype(template_rst.dtypes[0]), 1)
# function to create points per row #def point(row): # latitude = row["lat"] # longitude = row["lon"] # return Point(latitude,longitude) #populates geometry column with #df["geometry"] = df.apply(point,axis=1) #progress check #df.head() #iterates over rows and populates geometry column with points..(runtime longer than using a function) for index, row in df.iterrows(): geo.loc[index, 'geometry'] = Point(row["lon"], row["lat"]) #progress check geo.head() #makes a copy of the dataframe without the column for coordinates as tuples geo_out = geo[[ 'lat', 'lon', 'timestamp', 'userid', 'geometry', ]].copy() geo_out = gpd.GeoDataFrame(geo_out, geometry="geometry")
def main():
    data_path, calc_path, output_path = (
        load_config()['paths']['data'],
        load_config()['paths']['calc'],
        load_config()['paths']['output'])

    '''Get the modal shares'''
    # full set of modes:
    # modes_file_paths = [('Roads', 'national_roads'), ('Railways', 'national_rail'),
    #                     ('Airports', 'airnetwork'), ('Waterways', 'waterways')]
    modes_file_paths = [('Roads', 'national_roads')]
    modes = ['road', 'rail', 'air', 'inland', 'coastal']
    mode_cols = ['road', 'rail', 'air', 'inland', 'coastal']
    new_mode_cols = ['o', 'd', 'road', 'rail', 'air', 'inland', 'coastal']

    od_data_file = os.path.join(data_path, 'OD_data', 'OD_transport_data.xlsx')
    od_data_modes = pd.read_excel(od_data_file, sheet_name='mode').fillna(0)
    o_id_col = 'o'
    d_id_col = 'd'
    od_data_modes['total'] = od_data_modes[mode_cols].sum(axis=1)
    for m in mode_cols:
        # divide first, then zero out the infinities that arise from zero totals
        od_data_modes[m] = (od_data_modes[m] /
                            od_data_modes['total']).replace(np.inf, 0)
    od_data_modes = od_data_modes.fillna(0)
    od_fracs = od_data_modes[new_mode_cols]

    od_data_com = pd.read_excel(od_data_file, sheet_name='goods').fillna(0)
    # note: 'constructi' and 'petroluem' follow the truncated column names
    # used in the workbook
    ind_cols = ['sugar', 'wood', 'steel', 'constructi', 'cement',
                'fertilizer', 'coal', 'petroluem', 'manufactur',
                'fishery', 'meat']
    od_fracs = pd.merge(od_fracs, od_data_com, how='left',
                        on=['o', 'd']).fillna(0)

    od_fracs_crops = od_data_modes[new_mode_cols]
    crop_cols = ['rice', 'indust-cro']
    for cr in crop_cols:
        od_data_com_sums = od_data_com.groupby(['o', 'd']).agg({cr: 'sum'})
        od_com_frac = od_data_com_sums.groupby(level=0).apply(
            lambda x: x / float(x.sum()))
        od_com_frac = od_com_frac.reset_index(level=['o', 'd'])
        od_fracs_crops = pd.merge(od_fracs_crops, od_com_frac,
                                  how='left', on=['o', 'd']).fillna(0)
    del od_data_com, od_data_com_sums, od_com_frac

    # find the crop production months for the provinces
    crop_data_path = os.path.join(data_path, 'Agriculture_crops', 'crop_data')
    rice_month_file = os.path.join(data_path, 'rice_atlas_vietnam',
                                   'rice_production.shp')
    crop_month_fields = ['P_Jan', 'P_Feb', 'P_Mar', 'P_Apr', 'P_May', 'P_Jun',
                         'P_Jul', 'P_Aug', 'P_Sep', 'P_Oct', 'P_Nov', 'P_Dec']
    crop_names = ['rice', 'cash', 'cass', 'teas', 'maiz', 'rubb', 'swpo',
                  'acof', 'rcof', 'pepp']

    rice_prod_months = gpd.read_file(rice_month_file)
    rice_prod_months['total_prod'] = rice_prod_months[crop_month_fields].sum(axis=1)
    # min/max over the months with non-zero production
    month_tons = rice_prod_months[crop_month_fields].where(
        rice_prod_months[crop_month_fields] > 0)
    rice_prod_months['min_tons'] = month_tons.min(axis=1)
    rice_prod_months['max_tons'] = month_tons.max(axis=1)
    rice_prod_months['min_frac'] = rice_prod_months['min_tons'] / rice_prod_months['total_prod']
    rice_prod_months['max_frac'] = rice_prod_months['max_tons'] / rice_prod_months['total_prod']

    province_path = os.path.join(data_path, 'Vietnam_boundaries',
                                 'boundaries_stats', 'province_level_stats.shp')
    commune_path = os.path.join(data_path, 'Vietnam_boundaries',
                                'boundaries_stats', 'commune_level_stats.shp')
    rd_prop_file = os.path.join(data_path, 'mode_properties',
                                'road_properties.xlsx')

    flow_output_excel = os.path.join(output_path, 'flow_mapping_paths',
                                     'national_scale_flow_ods_road.xlsx')
    excl_wrtr = pd.ExcelWriter(flow_output_excel)
    flow_output_excel = os.path.join(output_path, 'flow_mapping_paths',
                                     'national_scale_od_matrix_road.xlsx')
    excl_wrtr_reg = pd.ExcelWriter(flow_output_excel)

    # load provinces and get geometry of the right province
    provinces = gpd.read_file(province_path)
    provinces = provinces.to_crs({'init': 'epsg:4326'})
    sindex_provinces = provinces.sindex

    # load communes and assign each commune to its nearest province
    communes = gpd.read_file(commune_path)
    communes = communes.to_crs({'init': 'epsg:4326'})
    communes['province_name'] = communes.geometry.apply(
        lambda x: get_nearest_node(x, sindex_provinces, provinces, 'name_eng'))
    sindex_communes = communes.sindex

    modes_df = []
    for m in range(len(modes_file_paths)):
        mode_data_path = os.path.join(data_path, modes_file_paths[m][0],
                                      modes_file_paths[m][1])
        for file in os.listdir(mode_data_path):
            try:
                if file.endswith(".shp") and 'edges' in file.lower().strip():
                    edges_in = os.path.join(mode_data_path, file)
                if file.endswith(".shp") and 'nodes' in file.lower().strip():
                    nodes_in = os.path.join(mode_data_path, file)
            except Exception:
                print('Network nodes and edge files necessary')

        # load nodes of the network
        nodes = gpd.read_file(nodes_in)
        nodes = nodes.to_crs({'init': 'epsg:4326'})
        nodes.columns = list(map(str.lower, nodes.columns))
        node_cols = nodes.columns.values.tolist()
        node_cols = [c for c in node_cols if c not in ('population', 'od_id')]
        nodes = nodes[node_cols]
        sindex_nodes = nodes.sindex

        # assign province IDs and OD IDs to the nodes they contain
        nodes['province_name'] = nodes.apply(
            lambda x: extract_gdf_values_containing_nodes(
                x, sindex_provinces, provinces, 'name_eng'), axis=1)
        nodes['od_id'] = nodes.apply(
            lambda x: extract_gdf_values_containing_nodes(
                x, sindex_provinces, provinces, 'od_id'), axis=1)

        if modes[m] == 'road':
            # use half the vehicle count of the incident edges as a
            # population proxy for each road node
            edges_df = national_road_shapefile_to_dataframe(edges_in, rd_prop_file)
            nodes_vehs = list(zip(edges_df['from_node'].values.tolist(),
                                  edges_df['to_node'].values.tolist(),
                                  edges_df['vehicle_co'].values.tolist()))
            nd_veh_list = []
            for nd in nodes['node_id'].values.tolist():
                veh = 0.5 * sum(
                    [int(v) for (f, t, v) in nodes_vehs if nd == f or nd == t])
                nd_veh_list.append((nd, veh))
            gdf_pops = pd.DataFrame(nd_veh_list, columns=['node_id', 'population'])
            del nd_veh_list
            nodes = pd.merge(nodes, gdf_pops, how='left', on=['node_id']).fillna(0)
            del gdf_pops
        elif modes[m] in ('inland', 'coastal'):
            nodes['population'] = nodes['tons']
        else:
            # build Voronoi polygons around the nodes and distribute
            # commune populations to them in proportion to overlap area
            xy_list = []
            for iter_, values in nodes.iterrows():
                xy = list(values.geometry.coords)
                xy_list += [list(xy[0])]
            vor = Voronoi(np.array(xy_list))
            regions, vertices = voronoi_finite_polygons_2d(vor)
            min_x = vor.min_bound[0] - 0.1
            max_x = vor.max_bound[0] + 0.1
            min_y = vor.min_bound[1] - 0.1
            max_y = vor.max_bound[1] + 0.1

            mins = np.tile((min_x, min_y), (vertices.shape[0], 1))
            bounded_vertices = np.max((vertices, mins), axis=0)
            maxs = np.tile((max_x, max_y), (vertices.shape[0], 1))
            bounded_vertices = np.min((bounded_vertices, maxs), axis=0)

            box = Polygon([[min_x, min_y], [min_x, max_y],
                           [max_x, max_y], [max_x, min_y]])

            poly_list = []
            for region in regions:
                polygon = vertices[region]
                # clip the Voronoi cell to the bounding box
                poly = Polygon(polygon)
                poly = poly.intersection(box)
                poly_list.append(poly)

            poly_index = list(np.arange(0, len(poly_list), 1))
            poly_df = pd.DataFrame(list(zip(poly_index, poly_list)),
                                   columns=['gid', 'geometry'])
            gdf_voronoi = gpd.GeoDataFrame(poly_df, crs='epsg:4326')
            gdf_voronoi['node_id'] = gdf_voronoi.apply(
                lambda x: extract_nodes_within_gdf(x, nodes, 'node_id'), axis=1)
            gdf_voronoi['population'] = 0
            gdf_voronoi = assign_value_in_area_proportions(
                communes, gdf_voronoi, 'population')
            gdf_pops = gdf_voronoi[['node_id', 'population']]
            del gdf_voronoi, poly_list, poly_df
            nodes = pd.merge(nodes, gdf_pops, how='left', on=['node_id']).fillna(0)
            del gdf_pops

        # fraction of each OD zone's population held by each node
        nodes_sums = nodes.groupby(['od_id', 'node_id']).agg({'population': 'sum'})
        nodes_frac = nodes_sums.groupby(level=0).apply(lambda x: x / float(x.sum()))
        nodes_frac = nodes_frac.reset_index(level=['od_id', 'node_id'])
        nodes_frac.rename(columns={'population': 'pop_frac'}, inplace=True)
        nodes = pd.merge(nodes, nodes_frac[['node_id', 'pop_frac']],
                         how='left', on=['node_id']).fillna(0)

        modes_df.append(nodes)
        del nodes_frac, nodes_sums, nodes

    national_ods_df = []
    for ind in ind_cols:
        national_ods_modes_df = []
        for m in range(len(modes_file_paths)):
            nodes = modes_df[m]
            od_nodes_regions = list(zip(nodes['node_id'].values.tolist(),
                                        nodes['province_name'].values.tolist(),
                                        nodes['od_id'].values.tolist(),
                                        nodes['pop_frac'].values.tolist()))
            ind_mode = modes[m] + '_' + ind
            od_fracs[ind_mode] = od_fracs[modes[m]] * od_fracs[ind]
            od_fracs_ind = od_fracs[[o_id_col, d_id_col, ind_mode]]
            od_fracs_ind = od_fracs_ind[od_fracs_ind[ind_mode] > 0]
            od_flows = list(zip(od_fracs_ind[o_id_col].values.tolist(),
                                od_fracs_ind[d_id_col].values.tolist(),
                                od_fracs_ind[ind_mode].values.tolist()))
            origins = list(set(od_fracs_ind[o_id_col].values.tolist()))
            destinations = list(set(od_fracs_ind[d_id_col].values.tolist()))

            # spread each OD flow over the node pairs of its zones,
            # weighted by the population fractions at both ends
            od_list = []
            for o in origins:
                for d in destinations:
                    fval = [fl for (org, des, fl) in od_flows
                            if org == o and des == d]
                    if len(fval) == 1 and fval[0] > 0:
                        o_matches = [(item[0], item[1], item[3])
                                     for item in od_nodes_regions if item[2] == o]
                        if len(o_matches) > 0:
                            for o_vals in o_matches:
                                o_val = 1.0 * fval[0] * (1.0 * o_vals[2])
                                o_node = o_vals[0]
                                o_region = o_vals[1]
                                d_matches = [(item[0], item[1], item[3])
                                             for item in od_nodes_regions
                                             if item[2] == d]
                                if len(d_matches) > 0:
                                    for d_vals in d_matches:
                                        od_val = 1.0 * o_val * (1.0 * d_vals[2])
                                        d_node = d_vals[0]
                                        d_region = d_vals[1]
                                        if od_val > 0 and o_node != d_node:
                                            od_list.append((o_node, o_region,
                                                            d_node, d_region,
                                                            od_val))
                        print(o, d, fval, modes[m], ind)

            national_ods_modes_df.append(
                pd.DataFrame(od_list, columns=['origin', 'o_region',
                                               'destination', 'd_region', ind]))
            del od_list, nodes
        national_ods_df.append(national_ods_modes_df)

    # all the crop OD pairs
    for file in os.listdir(crop_data_path):
        if file.endswith(".tif") and ('spam_p' in file.lower().strip()):
            fpath = os.path.join(crop_data_path, file)
            crop_name = [cr for cr in crop_names
                         if cr in file.lower().strip()][0]
            outCSVName = os.path.join(output_path, 'crop_flows',
                                      'crop_concentrations.csv')
            subprocess.run(["gdal2xyz.py", '-csv', fpath, outCSVName])

            '''Load points and convert to geodataframe with coordinates'''
            load_points = pd.read_csv(outCSVName, header=None,
                                      names=['x', 'y', 'tons'], index_col=None)
            load_points = load_points[load_points['tons'] > 0]

            geometry = [Point(xy) for xy in zip(load_points.x, load_points.y)]
            load_points = load_points.drop(['x', 'y'], axis=1)
            crs = {'init': 'epsg:4326'}
            crop_points = gpd.GeoDataFrame(load_points, crs=crs, geometry=geometry)
            del load_points

            if crop_name == 'rice':
                crop_points = assign_daily_min_max_tons_rice(crop_points,
                                                             rice_prod_months)
            else:
                crop_points['min_{}'.format(crop_name)] = 1.0 * crop_points['tons'] / 365.0
                crop_points['max_{}'.format(crop_name)] = 1.0 * crop_points['tons'] / 365.0

            crop_points['province_name'] = crop_points.apply(
                lambda x: extract_gdf_values_containing_nodes(
                    x, sindex_provinces, provinces, 'name_eng'), axis=1)

            national_ods_modes_df = []
            for m in range(len(modes_file_paths)):
                nodes = modes_df[m]
                crop_pts = crop_points.copy(deep=True)
                crop_pts['node_id'] = crop_pts.apply(
                    lambda x: get_nearest_node_within_region(
                        x, nodes, 'node_id', 'province_name'), axis=1)
                crop_pts = crop_pts[crop_pts['node_id'] != '']
                crop_pts = crop_pts[['node_id', 'min_{}'.format(crop_name),
                                     'max_{}'.format(crop_name)]]
                crop_nodes = crop_pts.groupby(['node_id'])[
                    ['min_{}'.format(crop_name),
                     'max_{}'.format(crop_name)]].sum().reset_index()
                del crop_pts

                nodes = pd.merge(nodes, crop_nodes, how='left',
                                 on=['node_id']).fillna(0)
                del crop_nodes

                crop_mode = modes[m] + '_' + crop_name
                if crop_name in ('rice', 'cereal', 'wheat'):
                    od_fracs_crops[crop_mode] = (od_fracs_crops[modes[m]]
                                                 * od_fracs_crops['rice'])
                else:
                    od_fracs_crops[crop_mode] = (od_fracs_crops[modes[m]]
                                                 * od_fracs_crops['indust-cro'])

                od_nodes_regions = list(zip(
                    nodes['node_id'].values.tolist(),
                    nodes['province_name'].values.tolist(),
                    nodes['od_id'].values.tolist(),
                    nodes['min_{}'.format(crop_name)].values.tolist(),
                    nodes['max_{}'.format(crop_name)].values.tolist(),
                    nodes['pop_frac'].values.tolist()))
                od_fracs_ind = od_fracs_crops[[o_id_col, d_id_col, crop_mode]]
                od_fracs_ind = od_fracs_ind[od_fracs_ind[crop_mode] > 0]
                od_flows = list(zip(od_fracs_ind[o_id_col].values.tolist(),
                                    od_fracs_ind[d_id_col].values.tolist(),
                                    od_fracs_ind[crop_mode].values.tolist()))
                origins = list(set(od_fracs_ind[o_id_col].values.tolist()))
                destinations = list(set(od_fracs_ind[d_id_col].values.tolist()))

                od_list = []
                for o in origins:
                    for d in destinations:
                        fval = [fl for (org, des, fl) in od_flows
                                if org == o and des == d]
                        if len(fval) == 1 and fval[0] > 0:
                            o_matches = [(item[0], item[1], item[3], item[4])
                                         for item in od_nodes_regions
                                         if item[2] == o]
                            if len(o_matches) > 0:
                                for o_vals in o_matches:
                                    o_val_min = 1.0 * fval[0] * o_vals[2]
                                    o_val_max = 1.0 * fval[0] * o_vals[3]
                                    o_node = o_vals[0]
                                    o_region = o_vals[1]
                                    d_matches = [(item[0], item[1], item[5])
                                                 for item in od_nodes_regions
                                                 if item[2] == d]
                                    if len(d_matches) > 0:
                                        for d_vals in d_matches:
                                            od_val_min = 1.0 * o_val_min * d_vals[2]
                                            od_val_max = 1.0 * o_val_max * d_vals[2]
                                            d_node = d_vals[0]
                                            d_region = d_vals[1]
                                            if od_val_max > 0 and o_node != d_node:
                                                od_list.append(
                                                    (o_node, o_region, d_node,
                                                     d_region, od_val_min,
                                                     od_val_max))
                            print(o, d, fval, modes[m], crop_name)

                national_ods_modes_df.append(
                    pd.DataFrame(od_list,
                                 columns=['origin', 'o_region', 'destination',
                                          'd_region',
                                          'min_{}'.format(crop_name),
                                          'max_{}'.format(crop_name)]))
                del od_list, nodes
            del crop_points
            national_ods_df.append(national_ods_modes_df)

    # transpose so each entry gathers all commodities for one mode
    national_ods_df = list(map(list, zip(*national_ods_df)))
    region_total = []
    for m in range(len(modes_file_paths)):
        all_ods = pd.concat(national_ods_df[m], axis=0, sort=False,
                            ignore_index=True).fillna(0)

        all_min_cols = ind_cols + ['min_{}'.format(c) for c in crop_names]
        all_ods['min_tons'] = all_ods[all_min_cols].sum(axis=1)
        all_max_cols = ind_cols + ['max_{}'.format(c) for c in crop_names]
        all_ods['max_tons'] = all_ods[all_max_cols].sum(axis=1)

        # rice keeps separate min/max columns; other crops collapse to one
        crops_norice = [cr for cr in crop_names if cr != 'rice']
        for cr in crops_norice:
            all_ods.drop('min_{}'.format(cr), axis=1, inplace=True)
            all_ods.rename(columns={'max_{}'.format(cr): cr}, inplace=True)

        all_ods_val_cols = [c for c in all_ods.columns.values.tolist()
                            if c not in ('origin', 'o_region',
                                         'destination', 'd_region')]
        all_ods = all_ods.groupby(['origin', 'o_region', 'destination',
                                   'd_region'])[all_ods_val_cols].sum().reset_index()

        all_ods_regions = all_ods[['o_region', 'd_region'] + all_ods_val_cols]
        all_ods_regions = all_ods_regions.groupby(
            ['o_region', 'd_region'])[all_ods_val_cols].sum().reset_index()
        all_ods_regions.to_excel(excl_wrtr_reg, modes[m], index=False)
        excl_wrtr_reg.save()
        region_total.append(all_ods_regions)
        del all_ods_regions

        all_ods = all_ods[all_ods['max_tons'] > 0.5]
        all_ods.to_excel(excl_wrtr, modes[m], index=False)
        excl_wrtr.save()
        del all_ods
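# Both the mode shares and the node population fractions above rely on the
# same normalize-within-group idiom: groupby(...).agg('sum') followed by
# groupby(level=0).apply(lambda x: x / x.sum()). A small self-contained
# demonstration with toy values, using the simpler vectorized equivalent
# via transform:
import pandas as pd

toy = pd.DataFrame({
    'od_id':      [1, 1, 2, 2, 2],
    'node_id':    ['a', 'b', 'c', 'd', 'e'],
    'population': [10.0, 30.0, 5.0, 5.0, 10.0],
})

# each node's share of its od_id total, in one vectorized step
toy['pop_frac'] = (toy['population']
                   / toy.groupby('od_id')['population'].transform('sum'))
print(toy)  # a -> 0.25, b -> 0.75, c -> 0.25, d -> 0.25, e -> 0.50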