def get_maximal_rectangle(coordinates):
    """Find the largest, inscribed, axis-aligned rectangle.

    :param coordinates: A list of [x, y] pairs describing a closed,
        convex polygon.
    :return: (bottom_left, top_right) corner coordinates, each as a list.
    """
    coordinates = np.array(coordinates)
    mins = np.min(coordinates, axis=0)
    maxs = np.max(coordinates, axis=0)
    # Normalize both axes to unit range so the optimization is well scaled.
    scale = np.array([maxs[0] - mins[0], maxs[1] - mins[1]])
    sc_coordinates = coordinates / scale

    poly = Polygon(sc_coordinates)
    rep = poly.representative_point()  # a point guaranteed inside the polygon
    inside_pt = (rep.x, rep.y)

    # Polygon edges expressed as linear inequalities A1*x + A2*y (<=|>=) B.
    A1, A2, B = pts_to_leq(sc_coordinates)

    bl = cvxpy.Variable(2)  # bottom-left corner
    tr = cvxpy.Variable(2)  # top-right corner
    br = cvxpy.Variable(2)  # bottom-right corner
    tl = cvxpy.Variable(2)  # top-left corner

    # Maximizing log(width) + log(height) is a concave surrogate for area.
    obj = cvxpy.Maximize(cvxpy.log(tr[0] - bl[0]) + cvxpy.log(tr[1] - bl[1]))

    # Force the four corners to form an axis-aligned rectangle.
    constraints = [
        bl[0] == tl[0],
        br[0] == tr[0],
        tl[1] == tr[1],
        bl[1] == br[1],
    ]
    # Keep every corner on the interior side of each polygon edge; the
    # interior side of edge i is the side the known inside point lies on.
    for i in range(len(B)):
        if inside_pt[0] * A1[i] + inside_pt[1] * A2[i] <= B[i]:
            for corner in (bl, tr, br, tl):
                constraints.append(corner[0] * A1[i] + corner[1] * A2[i] <= B[i])
        else:
            for corner in (bl, tr, br, tl):
                constraints.append(corner[0] * A1[i] + corner[1] * A2[i] >= B[i])

    prob = cvxpy.Problem(obj, constraints)
    prob.solve()

    # Undo the normalization before returning the corner coordinates.
    bottom_left = np.array(bl.value).T * scale
    top_right = np.array(tr.value).T * scale
    return bottom_left.tolist(), top_right.tolist()
def write_centroides(shape, file_name):
    """Write one 'x,y' line per shape record to *file_name*.

    For each shape a label point is chosen: an interior representative
    point when the polygon is valid, otherwise the centroid (which may
    fall outside an invalid polygon, but is always computable).

    :param shape: iterable of shapefile records exposing a ``.points`` ring.
    :param file_name: path of the text file to (over)write.
    """
    centroides = []
    for s in shape:
        p = Polygon(s.points)
        # Collapse the duplicated if/else branches: pick the point object,
        # then extract coordinates once.
        pt = p.representative_point() if p.is_valid else p.centroid
        centroides.append((pt.x, pt.y))
    with open(file_name, 'w') as f:
        f.write('\n'.join('{},{}'.format(x, y) for x, y in centroides))
def printCentroids(shapeAuto):
    """Return a label point for the FIRST shape in *shapeAuto*.

    NOTE(review): despite the loop (and the name suggesting printing),
    this returns on the first iteration, so only the first shape is ever
    examined; an empty iterable yields None.
    """
    for shp in shapeAuto:
        poly = Polygon(shp.points)
        return poly.representative_point() if poly.is_valid else poly.centroid
def polygonCenter(points):
    """Return an interior point of the polygon built from *points* as a list.

    Returns False (after printing a diagnostic) when shapely rejects the
    coordinates with an AssertionError.
    """
    try:
        poly = Polygon(points)
    except AssertionError:
        print('Could not turn coordinates to Polygon: {}'.format(points))
        return False
    return np.array(poly.representative_point()).tolist()
def get_tweets_map(self, where: List, lang: List = ['en'], pages: int = 25) -> Iterator[dict]:
    """Where: comma separated coordinates in polygon format"""
    # NOTE(review): mutable default `lang=['en']` is shared across calls, and
    # `pages` is accepted but never used in this body — confirm intent.
    for tweet in self.streaming_api(locations=where, languages=lang):
        # NOTE(review): MAP_RESPONSE is a shared dict mutated in place and
        # yielded repeatedly — every consumer that keeps a reference sees the
        # latest tweet only. Confirm callers consume each item immediately.
        MAP_RESPONSE['tweet'] = tweet['text']
        polygon = Polygon(tweet['place']['bounding_box']['coordinates'][0])
        # representative_point() is guaranteed to lie within the polygon.
        centroid = polygon.representative_point().coords[0]
        # [::-1] swaps the coordinate pair — presumably (lon, lat) -> (lat, lon)
        # for the map client; verify against the consumer.
        MAP_RESPONSE['geometry']['coordinates'] = list(centroid)[::-1]
        yield MAP_RESPONSE
def get_maximal_rectangle(coordinates):
    """Find the largest, inscribed, axis-aligned rectangle.

    :param coordinates: A list of [x, y] pairs describing a closed,
        convex polygon.
    :return: (bottom_left, top_right) corner coordinates, each as a list.
    """
    coordinates = np.array(coordinates)
    mins = np.min(coordinates, axis=0)
    maxs = np.max(coordinates, axis=0)
    # Normalize both axes to unit range so the solver is well scaled.
    scale = np.array([maxs[0] - mins[0], maxs[1] - mins[1]])
    sc_coordinates = coordinates / scale

    poly = Polygon(sc_coordinates)
    rep = poly.representative_point()  # guaranteed interior point
    inside_pt = (rep.x, rep.y)

    # Polygon edges expressed as linear inequalities A1*x + A2*y (<=|>=) B.
    A1, A2, B = pts_to_leq(sc_coordinates)

    bl = cvxpy.Variable(2)  # bottom-left corner
    tr = cvxpy.Variable(2)  # top-right corner
    br = cvxpy.Variable(2)  # bottom-right corner
    tl = cvxpy.Variable(2)  # top-left corner

    # Maximizing log(width) + log(height) is a concave surrogate for area.
    obj = cvxpy.Maximize(cvxpy.log(tr[0] - bl[0]) + cvxpy.log(tr[1] - bl[1]))

    # Force the four corners to form an axis-aligned rectangle.
    constraints = [bl[0] == tl[0],
                   br[0] == tr[0],
                   tl[1] == tr[1],
                   bl[1] == br[1],
                   ]
    # Keep every corner on the same side of each edge as the interior point.
    for i in range(len(B)):
        if inside_pt[0] * A1[i] + inside_pt[1] * A2[i] <= B[i]:
            for corner in (bl, tr, br, tl):
                constraints.append(corner[0] * A1[i] + corner[1] * A2[i] <= B[i])
        else:
            for corner in (bl, tr, br, tl):
                constraints.append(corner[0] * A1[i] + corner[1] * A2[i] >= B[i])

    prob = cvxpy.Problem(obj, constraints)
    prob.solve(solver=cvxpy.CVXOPT, verbose=False, max_iters=1000, reltol=1e-9)

    # Undo the normalization before returning the corner coordinates.
    bottom_left = np.array(bl.value).T * scale
    top_right = np.array(tr.value).T * scale
    return list(bottom_left[0]), list(top_right[0])
def generatePolygon(contours, hierarchy, contIndex):
    """Build a shapely polygon for contour *contIndex* (holes taken from its
    children in *hierarchy*) and return one point inside it.

    :param contours: sequence of vertex lists, indexed by contour id.
    :param hierarchy: (n, 4) array whose column 3 holds each contour's parent.
    :param contIndex: index of the outer (shell) contour.
    :return: ``[[x, y]]`` — a single representative interior point.
    """
    from shapely.geometry import Polygon  # Is a geographic information system library
    children = np.where(hierarchy[:, 3] == contIndex)[0]
    print("Children of {} are {}".format(contIndex, children))
    polyVertices = contours[contIndex]
    # All child contours are concatenated into ONE hole ring — presumably the
    # children never overlap; TODO confirm this is intended.
    holeVertices = []
    for child in children:
        holeVertices = holeVertices + contours[child]
    if not holeVertices:
        poly = Polygon(shell=polyVertices)
    else:
        poly = Polygon(shell=polyVertices, holes=[holeVertices])
    print("Generated the polygon, returning holes")
    # representative_point() is guaranteed to lie inside the polygon
    # (and therefore outside its holes).
    hole = poly.representative_point()
    return [[hole.x, hole.y]]
def add_boundary(boundary, start):
    """Clean one polygon boundary, append it to the enclosing `vertices`,
    `facets` and `holes` accumulators, and return the number of kept points."""
    # The boundary ring is closed (first point == last) and may contain
    # duplicate points, which meshpy/triangle cannot tolerate.
    raw = np.array(boundary.coords)
    # Indices of points occurring exactly once, sorted so ring order survives.
    keep = np.sort(unique_rows(raw)[0])
    deduped = raw[keep]
    vertices.append(deduped)
    facets.append(round_trip(start, len(deduped)))
    # A hole marker must be a point strictly inside the hole region.
    # representative_point() handles concave shapes, unlike a plain mean
    # of the vertices, at some extra cost.
    interior = Polygon(deduped).representative_point()
    holes.append(np.array(interior.coords)[0])
    return len(deduped)
def add_boundary(boundary, start):
    """Append the cleaned boundary ring to the enclosing `vertices`/`facets`
    accumulators, record an interior hole point, and return the point count."""
    # coords is an (n, 2) ordered list of points on the polygon boundary
    # the first and last points are the same, and there are no
    # guarantees on points not being duplicated (which will
    # later cause meshpy/triangle to shit a brick)
    coords = np.array(boundary.coords)
    # find indices points which occur only once, and sort them
    # to maintain order
    unique = np.sort(unique_rows(coords)[0])
    cleaned = coords[unique]
    vertices.append(cleaned)
    facets.append(round_trip(start, len(cleaned)))
    # holes require points inside the region of the hole, which we find
    # by creating a polygon from the cleaned boundary region, and then
    # using a representative point. You could do things like take the mean of
    # the points, but this is more robust (to things like concavity), if slower.
    test = Polygon(cleaned)
    holes.append(np.array(test.representative_point().coords)[0])
    return len(cleaned)
def _draw_path(self, axes, coords, codes, settings, fill=False, color='black', linewidth=0.5, label=None):
    """Draw a closed matplotlib path on *axes* and, if *label* is given,
    place a text label at a point inside (or near) the shape.

    :param axes: matplotlib axes to draw on.
    :param coords: path vertex coordinates.
    :param codes: per-vertex path codes (compared against 'CURVE4' below).
    :param settings: passed through to get_property / get_label_string.
    :param label: optional label key; nothing is drawn without it.
    """
    face_path = mpath.Path(coords, codes, closed=True)
    face_patch = mpatches.PathPatch(face_path, fill=fill, color=color, linewidth=linewidth)
    axes.add_patch(face_patch)
    if label:
        # Approximate the path by a polygon: of each CURVE4 triple keep only
        # the third (end) point; every non-curve vertex is kept as-is.
        poly_coords = list()
        num_curve_codes = 0
        for pt, code in zip(coords, codes):
            # BUG FIX: was `code is 'CURVE4'` — identity comparison with a
            # string literal is implementation-dependent; use equality.
            if code == 'CURVE4':
                num_curve_codes += 1
                if num_curve_codes == 3:
                    poly_coords.append(pt)
                    num_curve_codes = 0
            else:
                poly_coords.append(pt)
                num_curve_codes = 0
        rep_pt = None
        poly = Polygon(poly_coords)
        num_points = len(poly_coords)
        if poly.is_valid and poly.area > 0.075:
            # Big enough valid shape: label at an interior point, font size
            # scaled with area (capped at 20).
            rep_pt = poly.representative_point()
            fontsize = min(26 * math.sqrt(poly.area) + 2.0, 20)
        elif num_points > 45:
            # Fallback: vertex mean. NOTE(review): sums *coords* but divides
            # by len(poly_coords) — confirm this mismatch is intended.
            sum_x = 0.0
            sum_y = 0.0
            for coord in coords:
                sum_x += coord[0]
                sum_y += coord[1]
            rep_pt = point.Point([sum_x / float(num_points), sum_y / float(num_points)])
            fontsize = min(float(num_points) / 12.0, 20)
        if rep_pt:
            # The label box reuses the feature's configured color.
            color = self.get_property(label, 'color', settings)
            plt.text(rep_pt.x, rep_pt.y, self.get_label_string(label, settings),
                     size=fontsize, horizontalalignment='center',
                     verticalalignment='center',
                     bbox=dict(facecolor=color, edgecolor=color,
                               boxstyle='round', alpha=0.75))
def to_polygon(cls, coords, holes=None, test_point=None):
    '''
    Create shapely polygon from list of coordinate tuples if valid
    '''
    # A ring needs at least three points.
    if not coords or len(coords) < 3:
        return None

    # Fix for polygons crossing the 180th meridian
    lons = [lon for lon, lat in coords]
    if (max(lons) - min(lons) > 180):
        # Shift negative longitudes into 0..360 so the ring does not wrap
        # around the antimeridian.
        coords = [(lon + 360.0 if lon < 0 else lon, lat) for lon, lat in coords]
        if holes:
            # NOTE(review): this treats `holes` as a flat list of (lon, lat)
            # pairs, but Polygon(coords, holes) expects a sequence of rings —
            # confirm the caller's `holes` layout.
            holes = [(lon + 360.0 if lon < 0 else lon, lat) for lon, lat in holes]

    poly = Polygon(coords, holes)
    try:
        if test_point is None:
            test_point = poly.representative_point()
        # Consider the polygon broken only when repair is enabled, shapely
        # flags it invalid AND it fails to contain its own test point.
        invalid = cls.fix_invalid_polygons and not poly.is_valid and not poly.contains(test_point)
    except Exception:
        # Any geometry error counts as invalid.
        invalid = True
    if invalid:
        try:
            poly_fix = cls.fix_polygon(poly)
            # Accept the repaired geometry only when it is well-formed, valid,
            # of the same geometry type, and contains the test point.
            if poly_fix is not None and poly_fix.bounds and len(poly_fix.bounds) == 4 and poly_fix.is_valid and poly_fix.type == poly.type:
                if test_point is None:
                    test_point = poly_fix.representative_point()
                if poly_fix.contains(test_point):
                    poly = poly_fix
        except Exception:
            # Best-effort repair: fall back to the original polygon.
            pass
    return poly
def view_polygon_in_browser(single_polygon):
    """Render a polygon on a Leaflet/Mapbox map in the default web browser.

    Renders `templates/single-polygon.html` to a temporary HTML file,
    opens it in the browser, waits briefly, then removes the file.

    :param single_polygon: a shapely Polygon, or coordinates accepted by
        the Polygon constructor.
    """
    # Accept raw coordinates as well as Polygon instances.
    # (isinstance instead of `type(...) is` so Polygon subclasses pass too.)
    if not isinstance(single_polygon, Polygon):
        single_polygon = Polygon(single_polygon)
    single_polygon_exterior = single_polygon.exterior
    if hasattr(single_polygon_exterior, 'coords'):
        # Debug polygon object by plotting on Leaflet map in web browser by
        # rendering to an HTML file.
        single_polygon_coords = [[x, y] for x, y in zip(*single_polygon_exterior.coords.xy)]
        # representative_point() is guaranteed inside the polygon — a sensible
        # map center even for concave shapes.
        single_poly_rep = single_polygon.representative_point()
        temp_map_plot_filename = "mapbox-polygon-temp.html"
        jinja2.Template(open("templates/single-polygon.html").read()).stream(
            MAPBOX_ACCESS_TOKEN=os.environ['MAPBOX_ACCESS_TOKEN'],
            MAP_CENTER_POINT_COORD="[" + str(single_poly_rep.x) + "," + str(single_poly_rep.y) + "]",
            MAP_LAYER_GEOJSON=[single_polygon_coords]
        ).dump(temp_map_plot_filename)
        webbrowser.open_new("file://" + os.getcwd() + "/" + temp_map_plot_filename)
        # Give the browser time to load the file before it is removed.
        time.sleep(3)
        os.remove(temp_map_plot_filename)
def create_farm_plot(self, journal_num, minimum_geo_size):
    """Plot all fields of journal *journal_num*: an overview map (fields above
    *minimum_geo_size* in gray and numbered, smaller ones in red) plus a bar
    chart of crop types among the analyzed fields.

    :param journal_num: journal id matched against shapefile record[0].
    :param minimum_geo_size: minimum area (ha, per record[2]) for a field to
        count as "analyzed".
    :return: [overview_image_name, barplot_image_name] (saved in plot_folder).
    """
    self.fig = plt.figure(1, figsize=(16,9),dpi=150)
    plt.clf()
    ax = self.fig.add_subplot(111)
    m = 1
    crop_types = []  # list of [crop_type, count] pairs
    not_analyzed = False
    analyzed = False
    for shaperec in self.sf.iterShapeRecords():
        if(shaperec.record[0] == journal_num):
            shape = shaperec.shape
            field_num = shaperec.record[1]  # NOTE(review): assigned but unused
            crop_type = shaperec.record[4]
            polygon = Polygon(shape.points)
            plot_zone(ax, polygon.exterior)
            # Arithmetic-mean centroid as fallback label position.
            x_field_l = [i[0] for i in shape.points[:]]
            y_field_l = [i[1] for i in shape.points[:]]
            # NOTE(review): y-sum divided by len(x_field_l) — same length in
            # practice, but confirm.
            centroid = (sum(x_field_l) / len(x_field_l), sum(y_field_l) / len(x_field_l))
            try:
                rep_point = polygon.representative_point()
            except ValueError:
                # Invalid geometry: fall back to the mean centroid.
                rep_point = Point([centroid[0], centroid[1]])
                rep_point_calc = False  # NOTE(review): never read afterwards
            # record[2] looks like "123,45"-style area text; integer part only.
            if(int((shaperec.record[2]).partition(',')[0]) > minimum_geo_size):
                analyzed = True
                m = m + 1
                patch1 = PolygonPatch(polygon, fc=GRAY, ec=GRAY, alpha=0.5, zorder=2)
                ax.add_patch(patch1)
                ax.text(rep_point.x, rep_point.y,m,color='b')
                # Tally the crop type (linear scan over the pair list).
                found_crop = False
                for nj in range(0,len(crop_types)):
                    if((crop_types[nj])[0] == crop_type):
                        found_crop = True
                        (crop_types[nj])[1] = (crop_types[nj])[1]+1
                if(not found_crop):
                    crop_types.append([crop_type, 1])
            else:
                not_analyzed = True
                patch2 = PolygonPatch(polygon, fc=RED, ec=RED, alpha=0.5, zorder=2)
                ax.add_patch(patch2)
    # Legend depends on which categories actually occurred (patch1/patch2 are
    # the last patches created in the loop above).
    if(analyzed and not_analyzed):
        ax.legend([patch1,patch2],['Analyzed field areas','field areas <'+ str(minimum_geo_size)+ 'ha'],loc=2)
    elif(analyzed):
        ax.legend([patch1],['Analyzed field areas'],loc=2)
    elif(not_analyzed):
        ax.legend([patch2],['field areas <'+ str(minimum_geo_size)+ 'ha'],loc=2)
    ax.set_xlabel('UTM meter - Easting')
    ax.set_ylabel('UTM meter - Northing')
    ax.set_title("Fields overview: "+journal_num)
    plt.axis('equal')
    self.fig.tight_layout()
    image_name = journal_num+'_fields' + '.png'
    plt.savefig(self.plot_folder+image_name)
    # Second figure: bar chart of crop-type counts.
    self.fig = plt.figure(1, figsize=(16,7),dpi=150)
    plt.clf()
    image_name_bar = journal_num+'_fields_analyzed_barplot' + '.png'
    objects = []
    value = []
    for i in range(0,len(crop_types)):
        # .decode(...) implies crop names are byte strings (Python 2 era code).
        objects.append(((crop_types[i])[0]).decode('utf-8', 'ignore'))
        value.append(int((crop_types[i])[1]))
    y_pos = numpy.arange(len(objects))
    plt.bar(y_pos, value, align='center', fc=BLUE, alpha=0.5, zorder=2)
    plt.xticks(y_pos, objects, fontsize=16)
    plt.ylabel('Number of fields', fontsize=16)
    plt.title('Types of analysed fields', fontsize=16)
    self.fig.tight_layout()
    plt.savefig(self.plot_folder+image_name_bar)
    return [image_name, image_name_bar]
# NOTE(review): fragment — this snippet starts mid-function (`journal_num`,
# `sf`, `ax`, `m`, `minimum_geo_size` and the file object `i` come from the
# enclosing scope, not shown) and is cut off at the dangling `else:` below.
# Python 2 code (`print field_num`).
image_name = journal_num + '_fields' + '.png'
# `i` is read as a file-like template here, then shadowed by the list
# comprehensions below — TODO confirm.
data = (i.read()) % (image_name, image_name[:-4])
for shaperec in sf.iterShapeRecords():
    if (shaperec.record[0] == journal_num):
        shape = shaperec.shape
        field_num = shaperec.record[1]
        polygon = Polygon(shape.points)
        plot_zone(ax, polygon.exterior)
        # Arithmetic-mean centroid as fallback label position.
        x_field_l = [i[0] for i in shape.points[:]]
        y_field_l = [i[1] for i in shape.points[:]]
        centroid = (sum(x_field_l) / len(x_field_l), sum(y_field_l) / len(x_field_l))
        try:
            rep_point = polygon.representative_point()
        except ValueError:
            rep_point = Point([centroid[0], centroid[1]])
            rep_point_calc = False  # NOTE(review): never read in this fragment
        if (int(
                (shaperec.record[2]).partition(',')[0]) > minimum_geo_size):
            m = m + 1
            patch1 = PolygonPatch(polygon, fc=GRAY, ec=GRAY, alpha=0.5, zorder=2)
            ax.add_patch(patch1)
            ax.text(rep_point.x, rep_point.y, m, color='b')
            print field_num
        else:
def stand_ref_point(self, lon_point_list, lat_point_list):
    """Return a point guaranteed to lie inside the polygon formed by the
    parallel longitude/latitude coordinate lists."""
    ring = zip(lon_point_list, lat_point_list)
    return Polygon(ring).representative_point()
def centroid(shape):
    """Return a label point for *shape*: an interior representative point
    when the polygon is valid, otherwise its centroid."""
    poly = Polygon(shape.points)
    if not poly.is_valid:
        return poly.centroid
    return poly.representative_point()
# NOTE(review): script fragment — `poly_ref`, `poly0` and `poly1` are defined
# outside this snippet.
# 0/1 flag masks, 100 entries each; meaning defined by the surrounding script.
A_ref = []
A_ref += [0] * 20
A_ref += [1] * 50
A_ref += [0] * 30
point_sec = []
point_sec_lat_Y = []
point_sec_long_X = []
A_sec = []
A_sec += [0] * 30
A_sec += [1] * 60
A_sec += [0] * 10
i = 0
# Samples 0-49: reference poly0, secondary points drawn from poly0.
# NOTE(review): representative_point() is recomputed three times per
# iteration and always returns the SAME point for a given polygon — these are
# not random samples; confirm that is intended.
while i < 50:
    poly_ref.append(poly0)
    point_sec.append(poly0.representative_point())
    point_sec_long_X.append(poly0.representative_point().x)
    point_sec_lat_Y.append(poly0.representative_point().y)
    ##olypoints.append(poly0.representative_point()) #generate a point within poly0
    i += 1
# Samples 50-79: reference still poly0, secondary points from poly1 (mismatch
# scenario — presumably deliberate).
while i < 80:
    poly_ref.append(poly0)
    point_sec.append(poly1.representative_point())
    point_sec_long_X.append(poly1.representative_point().x)
    point_sec_lat_Y.append(poly1.representative_point().y)
    i += 1
# Samples 80-99: both reference and secondary from poly1.
# NOTE(review): no `i += 1` is visible in this last loop — as shown it would
# never terminate; the snippet appears truncated here.
while i < 100:
    poly_ref.append(poly1)
    point_sec.append(poly1.representative_point())
    point_sec_long_X.append(poly1.representative_point().x)
    point_sec_lat_Y.append(poly1.representative_point().y)
from atmospheres.controller.geo_json import sf_geo_json
import random
from atmospheres.models.tweet import Tweet

# this will be a list of (Polygon,zip_code) tuples
zip_polygons = []
sf_zipcode_array = []
neighborhoods = []
# zip_code -> [x, y] of a point guaranteed inside that zip's polygon
representative_points = {}

# populate shapely polygon list
for feature in sf_geo_json["features"]:
    # First ring of the first geometry of each GeoJSON GeometryCollection.
    polygon = Polygon(feature["geometry"]["geometries"][0]["coordinates"][0])
    zip_code = feature["id"]
    # NOTE(review): representative_point() is computed twice per feature —
    # could be hoisted into a local.
    representative_points[zip_code] = [polygon.representative_point().x,polygon.representative_point().y]
    sf_zipcode_array.append(str(zip_code))
    neighborhoods.append(feature["neighborhood"])
    zip_polygons.append((polygon,zip_code))

def get_mongo_reader():
    """Returns an instance of the MongoBridge that is connected to the database defined
    in properties.py
    """
    return DataStore(DATABASE_NAME, COLLECTION_NAME)
# NOTE(review): script fragment — `f`, `np`, `printProgress` come from the
# enclosing scope; the trailing matchCensusTract definition is cut off after
# its first `if`.
geojson = json.load(f)
tractFeatures = geojson["features"]
print("Read %s tract features" % len(tractFeatures))
if len(tractFeatures) < 1:
    print("No tract features found.")
    sys.exit()
print("Calculating census tract lat/lon centroids...")
tracts = []
tractCount = len(tractFeatures)
for i, feat in enumerate(tractFeatures):
    props = feat["properties"].copy()
    # First ring of the first polygon of each (Multi)Polygon feature.
    coords = feat["geometry"]["coordinates"][0][0]
    poly = Polygon(coords)
    # representative_point() guarantees a point inside the tract polygon.
    lonlat = poly.representative_point().coords[:][0]
    props["lonlat"] = lonlat
    tracts.append(props)
    printProgress(i + 1, tractCount)
tractCoords = np.asarray([t["lonlat"] for t in tracts])

def matchCensusTract(cdataLookup, tract):
    # NYC borough code -> state+county FIPS prefix.
    boroughCodes = {
        "1": "36061",  # Manhattan
        "2": "36005",  # Bronx
        "3": "36047",  # Brooklyn (King's County)
        "4": "36081",  # Queens
        "5": "36085",  # Staten Island (Richmond Count)
    }
    # NOTE(review): snippet truncated here — the body of this `if` (and the
    # rest of the function) is not visible.
    if "" + tract["boro_code"] not in boroughCodes:
# NOTE(review): fragment — the first line below is the tail of an
# output.append(...) call whose opening (and the surrounding loop/`if` over
# `coord`) lies outside this chunk; indentation here is inferred.
        deg_to_dms(coord[0], 'lon') + ' COLOR_AirspaceD')
else:
    first_coord = coord
previous_coord = coord
ShapelyData.append([coord[1], coord[0]])
# Close the ring: segment from the last coordinate back to the first.
output.append(
    deg_to_dms(previous_coord[1]) + ' ' +
    deg_to_dms(previous_coord[0], 'lon') + ' ' +
    deg_to_dms(first_coord[1]) + ' ' +
    deg_to_dms(first_coord[0], 'lon') + ' COLOR_AirspaceD')
# Text Label Position Calculation
ShapelyShape = Polygon(ShapelyData)
# representative_point() guarantees a point inside the airspace polygon.
centroid = ShapelyShape.representative_point()
center = centroid
if center in text_already_place:
    # Move it
    bigmaths = travelFrom(center.x, center.y, 180)
    center = Point(bigmaths[0], bigmaths[1])
text_already_place.append(center)
# Calculating text value
vertical_limit = ''
if j['properties']['lowerUnit'] == 'FT':
    # Lower limit in hundreds of feet, zero-padded to three digits.
    vertical_limit += str(
        int(int(j['properties']['lowerValue']) / 100)).rjust(3, '0')
def save_to_mongo(attrs, tr_id=None, collection_name=None):
    """returns the saved object or an empty list"""
    # NOTE(review): Python 2 code (print statements, dict.has_key).
    result_list=[]
    #turn our classifier string into a python dict
    p=str(attrs['classifiers']).split(".")
    if len(p)==2:
        attrs['classifiers']={"type":p[0], "category":p[1], "subcategory":""}
    if len(p)==3:
        attrs['classifiers']={"type":p[0], "category":p[1], "subcategory":p[2]}
    attrs['classifiers']=json.dumps(attrs['classifiers'])
    """ Make sure the a single 2d index exists in geometry_centroid """
    """Make sure our coordinates are a list, not a string """
    if attrs.has_key('bounds'):
        attrs['bounds']=json.loads(attrs['bounds'])
    """Make sure our coordinates are a list, not a string """
    if attrs.has_key('geometry_coordinates') and attrs.has_key('geometry_type'):
        attrs['geometry_coordinates']=json.loads(attrs['geometry_coordinates'])
        if str(attrs['geometry_type'])=="Polygon" or \
           str(attrs['geometry_type'])=="MultiPolygon" :
            attrs['geometry_polygon'] = attrs['geometry_coordinates']
            del attrs['geometry_coordinates']
            if not attrs.has_key('geometry_centroid'):
                # representative_point() guarantees a point inside the polygon.
                centroid = Polygon(attrs['geometry_polygon'])
                centroidpoint = centroid.representative_point()._get_coords()[0]
                attrs['geometry_centroid'] = list(centroidpoint)
        if str(attrs['geometry_type'])=="LineString":
            attrs['geometry_linestring'] = attrs['geometry_coordinates']
            centroid = LineString(attrs['geometry_linestring'])
            centroidpoint= centroid.representative_point()._get_coords()[0]
            del attrs['geometry_coordinates']
            attrs['geometry_centroid'] = list(centroidpoint)
        elif str(attrs['geometry_type'])=="Point":
            attrs['geometry_centroid'] = attrs['geometry_coordinates']
    try:
        mconnection = Connection(settings.MONGO_HOST, settings.MONGO_PORT)
        db = mconnection[settings.MONGO_DB_NAME]
        if not collection_name:
            """if no collection given, use the main one"""
            transactions = db[settings.MONGO_DB_NAME]
        else:
            transactions = db[collection_name]
        history = db[settings.MONGO_HISTORYDB_NAME]
    except:
        # NOTE(review): bare except swallows connection errors; execution then
        # continues and would fail below with `transactions` undefined.
        print str(sys.exc_info())
        result_list=[]
    s=Since.objects.get(pk=1)
    try:
        """Convert alt_names into a list"""
        if attrs.has_key('alt_names'):
            attrs['alt_names']=json.loads(attrs['alt_names'])
        """Convert tags into a list"""
        if attrs.has_key('tags'):
            attrs['tags']=json.loads(attrs['tags'])
        if attrs.has_key('classifiers'):
            attrs['classifiers']=json.loads(attrs['classifiers'])
        if tr_id:
            # UPDATE path: archive the old document, re-insert under a new _id.
            """Copy the old tx to the historical collection"""
            responsedict=raw_query_mongo_db({'id': tr_id})
            hist_id=history.insert(responsedict['results'])
            """Use the original tx_id handle"""
            attrs['id']=str(tr_id)
            """Set the new uuid"""
            s=Since.objects.get(pk=1)
            """Set the Since ID"""
            attrs['sinceid']=s.sinceid
            if attrs.has_key('alt_names'):
                attrs['alt_names']=json.loads(attrs['alt_names'])
            if attrs.has_key('tags'):
                attrs['tags']=json.loads(attrs['tags'])
            attrs['_id']=str(uuid.uuid4())
            attrs['history']=True
            attrs['verified']=False
            attrs['epoch']=build_utcnow_epoch_timestamp()
            #ensure the old version is out of the main collection
            my_id=transactions.remove({"id":tr_id })
            #insert the updated version
            my_id=transactions.insert(attrs)
            mysearchresult=transactions.find({'_id':attrs['_id']})
        else:
            # CREATE path.
            """The feature is NEW"""
            """ Check to see if the a similar item exists closeby. If so, flag this as a possible duplicate. """
            attrs=check_for_pos_dupes_via_geoloc(attrs, collection_name=collection_name)
            """Set the new uuid"""
            attrs['_id']=str(uuid.uuid4())
            """build the tr_id a.k.a. handle"""
            if attrs['geometry_type']=='Point':
                attrs['id']=build_geohash_id(attrs['geometry_centroid'][0], attrs['geometry_centroid'][1])
            else:
                attrs['id']=build_pretty_id(attrs['_id'])
            """Set the Since ID"""
            attrs['sinceid']=s.sinceid
            attrs['verified']=False
            attrs['epoch']=build_utcnow_epoch_timestamp()
            my_id=transactions.insert(attrs)
            mysearchresult=transactions.find({'_id':attrs['_id']})
        if attrs['classifiers'].has_key('subcategory'):
            if attrs['classifiers']['subcategory'] in ("country", "subdivision", "level-2"):
                update_or_create_area(attrs, attrs['classifiers']['subcategory'], mysearchresult[0]['id'])
        for d in mysearchresult:
            d=unflatten(d)
            result_list.append(d)
            d['type']="Feature"
        """Increment the sinceid"""
        s.sinceid=int(s.sinceid) + 1
        s.save()
    except:
        # NOTE(review): bare except — any failure is only printed.
        print sys.exc_info()
        result_list=[]
    return result_list
def save_to_mongo(attrs, tr_id=None, collection_name=None):
    """returns the saved object or an empty list"""
    # NOTE(review): Python 2 code (print statements, dict.has_key); an
    # auto-formatted duplicate of the save_to_mongo defined earlier.
    result_list = []
    #turn our classifier string into a python dict
    p = str(attrs['classifiers']).split(".")
    if len(p) == 2:
        attrs['classifiers'] = {
            "type": p[0],
            "category": p[1],
            "subcategory": ""
        }
    if len(p) == 3:
        attrs['classifiers'] = {
            "type": p[0],
            "category": p[1],
            "subcategory": p[2]
        }
    attrs['classifiers'] = json.dumps(attrs['classifiers'])
    """ Make sure the a single 2d index exists in geometry_centroid """
    """Make sure our coordinates are a list, not a string """
    if attrs.has_key('bounds'):
        attrs['bounds'] = json.loads(attrs['bounds'])
    """Make sure our coordinates are a list, not a string """
    if attrs.has_key('geometry_coordinates') and attrs.has_key(
            'geometry_type'):
        attrs['geometry_coordinates'] = json.loads(
            attrs['geometry_coordinates'])
        if str(attrs['geometry_type'])=="Polygon" or \
           str(attrs['geometry_type'])=="MultiPolygon" :
            attrs['geometry_polygon'] = attrs['geometry_coordinates']
            del attrs['geometry_coordinates']
            if not attrs.has_key('geometry_centroid'):
                # representative_point() guarantees a point inside the polygon.
                centroid = Polygon(attrs['geometry_polygon'])
                centroidpoint = centroid.representative_point()._get_coords(
                )[0]
                attrs['geometry_centroid'] = list(centroidpoint)
        if str(attrs['geometry_type']) == "LineString":
            attrs['geometry_linestring'] = attrs['geometry_coordinates']
            centroid = LineString(attrs['geometry_linestring'])
            centroidpoint = centroid.representative_point()._get_coords()[0]
            del attrs['geometry_coordinates']
            attrs['geometry_centroid'] = list(centroidpoint)
        elif str(attrs['geometry_type']) == "Point":
            attrs['geometry_centroid'] = attrs['geometry_coordinates']
    try:
        mconnection = Connection(settings.MONGO_HOST, settings.MONGO_PORT)
        db = mconnection[settings.MONGO_DB_NAME]
        if not collection_name:
            """if no collection given, use the main one"""
            transactions = db[settings.MONGO_DB_NAME]
        else:
            transactions = db[collection_name]
        history = db[settings.MONGO_HISTORYDB_NAME]
    except:
        # NOTE(review): bare except swallows connection errors; execution then
        # continues and would fail below with `transactions` undefined.
        print str(sys.exc_info())
        result_list = []
    s = Since.objects.get(pk=1)
    try:
        """Convert alt_names into a list"""
        if attrs.has_key('alt_names'):
            attrs['alt_names'] = json.loads(attrs['alt_names'])
        """Convert tags into a list"""
        if attrs.has_key('tags'):
            attrs['tags'] = json.loads(attrs['tags'])
        if attrs.has_key('classifiers'):
            attrs['classifiers'] = json.loads(attrs['classifiers'])
        if tr_id:
            # UPDATE path: archive the old document, re-insert under a new _id.
            """Copy the old tx to the historical collection"""
            responsedict = raw_query_mongo_db({'id': tr_id})
            hist_id = history.insert(responsedict['results'])
            """Use the original tx_id handle"""
            attrs['id'] = str(tr_id)
            """Set the new uuid"""
            s = Since.objects.get(pk=1)
            """Set the Since ID"""
            attrs['sinceid'] = s.sinceid
            if attrs.has_key('alt_names'):
                attrs['alt_names'] = json.loads(attrs['alt_names'])
            if attrs.has_key('tags'):
                attrs['tags'] = json.loads(attrs['tags'])
            attrs['_id'] = str(uuid.uuid4())
            attrs['history'] = True
            attrs['verified'] = False
            attrs['epoch'] = build_utcnow_epoch_timestamp()
            #ensure the old version is out of the main collection
            my_id = transactions.remove({"id": tr_id})
            #insert the updated version
            my_id = transactions.insert(attrs)
            mysearchresult = transactions.find({'_id': attrs['_id']})
        else:
            # CREATE path.
            """The feature is NEW"""
            """ Check to see if the a similar item exists closeby. If so, flag this as a possible duplicate. """
            attrs = check_for_pos_dupes_via_geoloc(
                attrs, collection_name=collection_name)
            """Set the new uuid"""
            attrs['_id'] = str(uuid.uuid4())
            """build the tr_id a.k.a. handle"""
            if attrs['geometry_type'] == 'Point':
                attrs['id'] = build_geohash_id(attrs['geometry_centroid'][0],
                                               attrs['geometry_centroid'][1])
            else:
                attrs['id'] = build_pretty_id(attrs['_id'])
            """Set the Since ID"""
            attrs['sinceid'] = s.sinceid
            attrs['verified'] = False
            attrs['epoch'] = build_utcnow_epoch_timestamp()
            my_id = transactions.insert(attrs)
            mysearchresult = transactions.find({'_id': attrs['_id']})
        if attrs['classifiers'].has_key('subcategory'):
            if attrs['classifiers']['subcategory'] in ("country",
                                                       "subdivision",
                                                       "level-2"):
                update_or_create_area(attrs,
                                      attrs['classifiers']['subcategory'],
                                      mysearchresult[0]['id'])
        for d in mysearchresult:
            d = unflatten(d)
            result_list.append(d)
            d['type'] = "Feature"
        """Increment the sinceid"""
        s.sinceid = int(s.sinceid) + 1
        s.save()
    except:
        # NOTE(review): bare except — any failure is only printed.
        print sys.exc_info()
        result_list = []
    return result_list
def representative_point(self) -> Position:
    """Return a Position guaranteed to lie within the polygon spanned by
    this object's vertexes."""
    ring = [vertex.tuple_z() for vertex in self.vertexes]
    interior = Polygon(ring).representative_point()
    return Position(lat=interior.y, long=interior.x)