def testFullText(self):
    """Round-trip a notice carrying the maximum amount of free text."""
    notice = area_notice.AreaNotice(
        area_notice.notice_type['cau_mammals_not_obs'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
        60, 10, source_mmsi=2)
    notice.add_subarea(area_notice.AreaNoticeCirclePt(-69.5, 42, radius=0))  # 1
    # Eight 14-character sections fill sub-messages 2 through 9.
    text_sections = (
        '12345678901234',  # 2
        'More text that',  # 3
        ' spans across ',  # 4
        'multiple lines',  # 5
        '  The text is ',  # 6
        'supposed to be',  # 7
        ' cated togethe',  # 8
        'r.  12345678901'  # 9
    )
    for section in text_sections:
        notice.add_subarea(area_notice.AreaNoticeFreeText(text=section))
    expected = ''.join(text_sections).upper()
    self.assertEqual(notice.get_merged_text(), expected)
    # Serialize, decode from the NMEA sentences, and compare GeoJSON forms.
    orig = geojson.loads(geojson.dumps(notice))
    rebuilt = area_notice.AreaNotice(nmea_strings=list(notice.get_aivdm()))
    decoded = geojson.loads(geojson.dumps(rebuilt))
    self.assertAlmostEqualGeojson(orig, decoded)
def get_city_geojson(municipio):
    """
    Fetch a municipality's GeoJSON from the database.

    :param municipio: geocode of the municipality
    :return: GeoJSON FeatureCollection containing one Feature
    """
    head = r'{"type": "FeatureCollection", "features":['
    tail = ']}'
    with db_engine.connect() as conn:
        res = conn.execute(
            '''
            select geocodigo, nome, geojson, populacao, uf
            from "Dengue_global"."Municipio"
            where geocodigo=%s
            ''',
            (municipio,)
        )
        datum = dict(res.fetchone().items())
        # Promote the stored geometry to a full Feature with properties.
        feat = geojson.loads(datum['geojson'])
        feat['type'] = 'Feature'
        feat['properties'] = {
            'geocodigo': datum['geocodigo'],
            'nome': datum['nome'],
            'populacao': datum['populacao'],
        }
        # Wrap the single feature in a FeatureCollection.
        return geojson.loads(head + geojson.dumps(feat) + tail)
def compare_osm_kenyaopendata():
    """Join OSM school features with Kenya Open Data records by official_name."""
    osm = geojson.loads(readfile('kibera-schools-osm.geojson'))
    kod = geojson.loads(readfile('kibera-primary-secondary-schools.geojson'))
    result = {'type': 'FeatureCollection', 'features': []}
    # TODO make sure all features in KOD are in OSM (through osmly)
    for feature in osm.features:
        points = [(feature.geometry.coordinates[0],
                   feature.geometry.coordinates[1])]
        # Namespace every OSM property under "osm:".
        properties = {}
        for osm_property in feature.properties.keys():
            properties["osm:" + osm_property] = feature.properties[osm_property]
        if 'official_name' in feature.properties:
            # Find matching KOD records and fold in their location/properties.
            for kod_feature in kod.features:
                if ('official_name' in kod_feature.properties
                        and kod_feature.properties['official_name']
                        == feature.properties['official_name']):
                    points.append((kod_feature.geometry.coordinates[0],
                                   kod_feature.geometry.coordinates[1]))
                    for kod_property in kod_feature.properties.keys():
                        if kod_property != 'lat' and kod_property != 'lon':
                            properties["kenyaopendata:" + kod_property] = \
                                kod_feature.properties[kod_property]
        result['features'].append({
            "type": "Feature",
            "properties": properties,
            "geometry": MultiPoint(points),
        })
    dump = geojson.dumps(result, sort_keys=True, indent=2)
    writefile('kibera-combined-schools.geojson', dump)
def test_01_full_text(self):
    'full text'
    notice = an.AreaNotice(
        an.notice_type['cau_mammals_not_obs'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
        60, 10, source_mmsi=2)
    notice.add_subarea(an.AreaNoticeCirclePt(-69.5, 42, radius=0))  # 1
    text_sections = (
        '12345678901234',  # 2
        'More text that',  # 3
        ' spans across ',  # 4
        'multiple lines',  # 5
        '  The text is ',  # 6
        'supposed to be',  # 7
        ' cated togethe',  # 8
        'r.  12345678901'  # 9
    )
    for text in text_sections:
        notice.add_subarea(an.AreaNoticeFreeText(text=text))
    expected = ''.join(text_sections).upper()
    # assertEqual reports a diff on failure; failUnless is a deprecated
    # alias removed in modern unittest.
    self.assertEqual(notice.get_merged_text(), expected)
    orig = geojson.loads(geojson.dumps(notice))
    decoded = geojson.loads(geojson.dumps(
        an.AreaNotice(nmea_strings=[line for line in notice.get_aivdm()])))
    self.assertTrue(almost_equal_geojson(orig, decoded))
def test_01point(self):
    'point'
    year = datetime.datetime.utcnow().year
    pt1 = an.AreaNotice(
        an.notice_type['cau_mammals_not_obs'],
        datetime.datetime(year, 8, 6, 0, 1, 0),
        60, 10, source_mmsi=445566778)
    pt1.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.0, radius=0))
    orig = geojson.loads(geojson.dumps(pt1))
    decoded_pt = an.AreaNotice(nmea_strings=[line for line in pt1.get_aivdm()])
    decoded = geojson.loads(geojson.dumps(decoded_pt))
    # Report failures through the test framework instead of sys.exit(),
    # which would abort the whole test run; verbose=True prints the
    # mismatch details on failure.  failUnless is deprecated.
    self.assertTrue(almost_equal_geojson(orig, decoded, verbose=True))
def test_whales(self):
    'whales observed circle notice'
    zone_type = an.notice_type['cau_mammals_reduce_speed']
    circle = an.AreaNotice(
        zone_type,
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
        60, 10, source_mmsi=123456789)
    circle.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.0, radius=4260))
    self.assertEqual(zone_type, 1)
    self.assertEqual(zone_type, circle.area_type)

    # Inspect the serialized dictionary to verify the contents.
    json = geojson.dumps(circle)
    data = geojson.loads(json)
    self.assertEqual(zone_type, data['bbm']['area_type'])
    self.assertEqual(an.notice_type[zone_type], data['bbm']['area_type_desc'])

    # Round-trip through the NMEA sentences and verify again.
    aivdms = list(circle.get_aivdm())
    del circle
    del data
    del json
    notice = an.AreaNotice(nmea_strings=aivdms)
    self.assertEqual(zone_type, notice.area_type)
    data = geojson.loads(geojson.dumps(notice))
    self.assertEqual(zone_type, data['bbm']['area_type'])
    self.assertEqual(an.notice_type[zone_type], data['bbm']['area_type_desc'])
def testPoint(self):
    """Round-trip a notice containing one subarea of each kind."""
    notice = area_notice.AreaNotice(
        area_notice.notice_type['cau_mammals_not_obs'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
        60, 10, source_mmsi=666555444)
    subareas = (
        area_notice.AreaNoticeCirclePt(-69.8, 40.001, radius=0),
        area_notice.AreaNoticeCirclePt(-69.8, 40.202, radius=2000),
        area_notice.AreaNoticeRectangle(-69.6, 40.3003, 2000, 1000, 0),
        area_notice.AreaNoticeSector(-69.4, 40.40004, 6000, 10, 50),
        area_notice.AreaNoticePolyline([(170, 7400)], -69.2, 40.5000005),
        area_notice.AreaNoticePolygon([(10, 1400), (90, 1950)], -69.0, 40.6000001),
        area_notice.AreaNoticeFreeText(text='Some Text'),
    )
    for subarea in subareas:
        notice.add_subarea(subarea)
    orig = geojson.loads(geojson.dumps(notice))
    nmea_strings = list(notice.get_aivdm())
    decoded = geojson.loads(
        geojson.dumps(area_notice.AreaNotice(nmea_strings=nmea_strings)))
    self.assertAlmostEqualGeojson(orig, decoded)
def convert2geojson():
    """Regenerate the KOD and OSM school GeoJSON files from their sources."""
    # KOD: remove stale outputs with os.remove (portable, no shell) instead
    # of shelling out to `rm`, which fails silently on non-Unix systems.
    for stale in ('kibera-primary-schools.geojson',
                  'kibera-secondary-schools.geojson'):
        if os.path.exists(stale):
            os.remove(stale)
    os.system("ogr2ogr -f GeoJSON kibera-primary-schools.geojson kibera-primary-schools.vrt")
    os.system("ogr2ogr -f GeoJSON kibera-secondary-schools.geojson kibera-secondary-schools.vrt")
    kod_primary = geojson.loads(readfile('kibera-primary-schools.geojson'))
    kod_secondary = geojson.loads(readfile('kibera-secondary-schools.geojson'))
    kod_primary.features.extend(kod_secondary.features)
    dump = geojson.dumps(kod_primary, sort_keys=True, indent=2)
    writefile('kibera-primary-secondary-schools.geojson', dump)

    # OSM: convert each area extract, clean it, then merge into one file.
    os.system("osmtogeojson -e kibera-schools-osm.xml > kibera-schools-osm.geojson")
    os.system("osmtogeojson -e mathare-schools-osm.xml > mathare-schools-osm.geojson")
    os.system("osmtogeojson -e kangemi-schools-osm.xml > kangemi-schools-osm.geojson")
    clean_osm('kibera-schools-osm.geojson')
    clean_osm('mathare-schools-osm.geojson')
    clean_osm('kangemi-schools-osm.geojson')
    osm_kibera = geojson.loads(readfile('kibera-schools-osm.geojson'))
    osm_mathare = geojson.loads(readfile('mathare-schools-osm.geojson'))
    osm_kangemi = geojson.loads(readfile('kangemi-schools-osm.geojson'))
    osm_kibera.features.extend(osm_mathare.features)
    osm_kibera.features.extend(osm_kangemi.features)
    dump = geojson.dumps(osm_kibera, sort_keys=True, indent=2)
    writefile('nairobi-schools-osm.geojson', dump)
def test_polygon(self):
    'polygon'
    src = an.AreaNotice(
        an.notice_type['cau_divers'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
        60, 10, source_mmsi=987123456)
    src.add_subarea(an.AreaNoticePolygon([(10, 1400), (90, 1950)], -69.8, 42.5))
    orig = geojson.loads(geojson.dumps(src))
    # Rebuild the notice from its NMEA sentences and compare GeoJSON forms.
    rebuilt = an.AreaNotice(nmea_strings=list(src.get_aivdm()))
    decoded = geojson.loads(geojson.dumps(rebuilt))
    self.assertAlmostEqualGeojson(orig, decoded)
def test_line(self):
    'line'
    src = an.AreaNotice(
        an.notice_type['report_of_icing'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
        60, 10, source_mmsi=123456)
    src.add_subarea(an.AreaNoticePolyline([(10, 2400), ], -69.8, 42.4))
    orig = geojson.loads(geojson.dumps(src))
    # Rebuild from NMEA and verify the GeoJSON round-trip.
    rebuilt = an.AreaNotice(nmea_strings=list(src.get_aivdm()))
    decoded = geojson.loads(geojson.dumps(rebuilt))
    self.assertAlmostEqualGeojson(orig, decoded)
def _check_region_json(area):
    """Validate that `area` is a JSON-serializable, parseable GeoJSON value.

    :param area: region object to validate; None means "no region" and is
        accepted as-is.
    :return: True when validation passes.
    :raises: the REGION_INVALID user exception when `area` is invalid.
    """
    if area is not None:
        try:
            geojson.loads(json.dumps(area))
        except (TypeError, ValueError):
            # json.dumps raises TypeError for objects that are not JSON
            # serializable; geojson.loads raises ValueError for malformed
            # GeoJSON.  Both mean the region is invalid.
            raise_user_exc(REGION_INVALID)
    return True
def test_sector(self):
    'sector'
    src = an.AreaNotice(
        an.notice_type['cau_habitat_reduce_speed'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
        60, 10, source_mmsi=456)
    src.add_subarea(an.AreaNoticeSector(-69.8, 42.3, 4000, 10, 50))
    orig = geojson.loads(geojson.dumps(src))
    # Decode from NMEA sentences and verify the GeoJSON round-trip.
    rebuilt = an.AreaNotice(nmea_strings=list(src.get_aivdm()))
    decoded = geojson.loads(geojson.dumps(rebuilt))
    self.assertAlmostEqualGeojson(orig, decoded)
def convert2geojson():
    """Convert KOD VRT sources and the OSM extract to GeoJSON files."""
    # Kenya Open Data: primary + secondary schools merged into one file.
    os.system("ogr2ogr -f GeoJSON kibera-primary-schools.geojson kibera-primary-schools.vrt")
    os.system("ogr2ogr -f GeoJSON kibera-secondary-schools.geojson kibera-secondary-schools.vrt")
    primary = geojson.loads(readfile('kibera-primary-schools.geojson'))
    secondary = geojson.loads(readfile('kibera-secondary-schools.geojson'))
    primary.features.extend(secondary.features)
    writefile('kibera-primary-secondary-schools.geojson',
              geojson.dumps(primary, sort_keys=True, indent=2))
    # OpenStreetMap extract.
    os.system("osmtogeojson kibera-schools-osm.xml > kibera-schools-osm.geojson")
def extract(self, target, data):
    """Parse the stream `data` as GeoJSON.

    :param target: unused here; part of the extractor interface.
    :param data: file-like object whose contents should be GeoJSON text.
    :return: [('geojson', obj)] when the content is valid GeoJSON,
        otherwise None.
    """
    t = data.read()
    try:
        data = geojson.loads(t)
    except ValueError:
        # Possibly malformed json with leading junk: retry from the first
        # structural character.  str.find returns -1 when a character is
        # absent, so only consider indices that were actually found --
        # min(t.find('{'), t.find('[')) would wrongly pick -1.
        starts = [i for i in (t.find('{'), t.find('[')) if i != -1]
        if not starts:
            return None
        data = geojson.loads(t[min(starts):])
    result = geojson.is_valid(data)
    if result['valid'] == 'yes':
        return [('geojson', data)]
def test_freetext(self):
    'freetext'
    src = an.AreaNotice(
        an.notice_type['res_military_ops'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 4, 0),
        60, 10, source_mmsi=300000000)
    src.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.6, radius=0))
    src.add_subarea(an.AreaNoticeFreeText(text="Explanation"))
    orig = geojson.loads(geojson.dumps(src))
    # Rebuild from NMEA and verify the GeoJSON round-trip.
    rebuilt = an.AreaNotice(nmea_strings=list(src.get_aivdm()))
    decoded = geojson.loads(geojson.dumps(rebuilt))
    self.assertAlmostEqualGeojson(orig, decoded)
def test_rect(self):
    'rectangle'
    src = an.AreaNotice(
        an.notice_type['cau_mammals_reduce_speed'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
        60, 10, source_mmsi=123)
    src.add_subarea(an.AreaNoticeRectangle(-69.8, 42, 4000, 1000, 0))
    orig = geojson.loads(geojson.dumps(src))
    # Decode from NMEA sentences and verify the GeoJSON round-trip.
    rebuilt = an.AreaNotice(nmea_strings=list(src.get_aivdm()))
    decoded = geojson.loads(geojson.dumps(rebuilt))
    self.assertAlmostEqualGeojson(orig, decoded)
def test_freetext(self):
    'freetext'
    text1 = an.AreaNotice(
        an.notice_type['res_military_ops'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 4, 0),
        60, 10, source_mmsi=300000000)
    text1.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.6, radius=0))
    text1.add_subarea(an.AreaNoticeFreeText(text="Explanation"))
    orig = geojson.loads(geojson.dumps(text1))
    text2 = an.AreaNotice(nmea_strings=[line for line in text1.get_aivdm()])
    decoded = geojson.loads(geojson.dumps(text2))
    # Report failures via the test framework instead of sys.exit(), which
    # would abort the whole run; failUnless is a deprecated alias of
    # assertTrue.  verbose=True prints the mismatch details on failure.
    self.assertTrue(almost_equal_geojson(orig, decoded, verbose=True))
def test_02circle(self):
    'circle'
    now = datetime.datetime.utcnow()
    # Don't use seconds -- the message can only encode minutes.
    src = an.AreaNotice(
        an.notice_type['cau_mammals_reduce_speed'],
        datetime.datetime(now.year, 7, 6, 0, 0, 0),
        60, 10, source_mmsi=2)
    src.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.1, radius=4260))
    orig = geojson.loads(geojson.dumps(src))
    nmea_strings = list(src.get_aivdm())
    decoded = geojson.loads(
        geojson.dumps(an.AreaNotice(nmea_strings=nmea_strings)))
    self.assertAlmostEqualGeojson(orig, decoded)
def test_01point(self):
    'point'
    year = datetime.datetime.utcnow().year
    pt1 = an.AreaNotice(
        an.notice_type['cau_mammals_not_obs'],
        datetime.datetime(year, 8, 6, 0, 1, 0),
        60, 10, source_mmsi=445566778)
    pt1.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.0, radius=0))
    orig = geojson.loads(geojson.dumps(pt1))
    decoded_pt = an.AreaNotice(nmea_strings=[line for line in pt1.get_aivdm()])
    decoded = geojson.loads(geojson.dumps(decoded_pt))
    # One verbose assertion suffices; the original asserted the same
    # condition twice (once with verbose=True, once without).
    self.assertAlmostEqualGeojson(orig, decoded, verbose=True)
def test_polygon(self):
    'polygon'
    poly1 = an.AreaNotice(
        an.notice_type['cau_divers'],
        datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
        60, 10, source_mmsi=987123456)
    poly1.add_subarea(an.AreaNoticePolygon([(10, 1400), (90, 1950)], -69.8, 42.5))
    orig = geojson.loads(geojson.dumps(poly1))
    poly2 = an.AreaNotice(nmea_strings=[line for line in poly1.get_aivdm()])
    decoded = geojson.loads(geojson.dumps(poly2))
    # failUnless is a deprecated alias; assertTrue is the supported spelling.
    self.assertTrue(almost_equal_geojson(orig, decoded))
def test_decode(self):
    """
    Ensure a GeoJSON string can be decoded into GeoJSON objects
    """
    decoded = geojson.loads(self.restaurant_str)
    # The decoded object must match the original's __geo_interface__ form.
    self.assertEqual(self.restaurant1.__geo_interface__, decoded)
def getChips(sr, project_id, plotid, collection, chip_name, vis, poly): """retrieve image chips. Args: collection (ee.ImageCollection): a image colleciton to extract chips. vis (dictionary): visualization parameters. point (ee.Geometry.Point): pixel sample. size (int): image chip dimension in pixels. ` Returns: none """ print '\t\t', 'chipping', sr, project_id, plotid, chip_name first = ee.Image(collection.first()).visualize(**vis).unmask() result = collection.iterate(lambda img, prev: ee.Image(prev).addBands(ee.Image(img).visualize(**vis).unmask()), first) this_job = '%s_project_%d_plot_%d_%s' % (sr, project_id, plotid, chip_name) this_folder = 'prj_%s' % project_id # # task = Export.image.toDrive(ee.Image(result), this_job, this_folder, this_job, None, # poly.transform(None, 15).getInfo()['coordinates'], None, # CRS, CRS_TRANSFORM, 1e13) task = Export.image.toDrive(ee.Image(result), this_job, this_folder, this_job, None, ee.Geometry(geojson.loads(json.dumps(poly.getInfo()))), None, CRS, CRS_TRANSFORM, 1e13) task.start()
def lidar_shp(request):
    """
    Transform the profile line (2D) to an ESRI shapefile and return it zipped.

    :param request: Pyramid request carrying 'geom' (GeoJSON) in params.
    :return: FileResponse serving the zipped shapefile.
    """
    import zipfile  # stdlib archive support (local import)

    # set up paths
    geom = geojson.loads(request.params['geom'],
                         object_hook=geojson.GeoJSON.to_instance)
    outputDir = request.registry.settings['lidar_output_dir'].replace('\\', '/')
    outputShp = outputDir + str(uuid.uuid4())

    # Create pyshp polyline ESRI shapefile and write it to disk
    outShp = shapefile.Writer(shapefile.POLYLINE)
    outShp.line(parts=[geom.coordinates])
    outShp.field('FIRST_FLD', 'C', '40')
    outShp.record('First', 'Line')
    outShp.save(outputShp)

    # zip the shapefile for nice single output.  BUG FIX: the builtin zip()
    # cannot create archives; zipfile.ZipFile is required here.
    zipShp = zipfile.ZipFile(outputShp + '.zip', mode='w')
    zipShp.write(outputShp + '.shp', os.path.basename(outputShp + '.shp'))
    zipShp.write(outputShp + '.dbf', os.path.basename(outputShp + '.dbf'))
    zipShp.write(outputShp + '.shx', os.path.basename(outputShp + '.shx'))
    zipShp.close()

    # remove the intermediate shapefile components
    os.remove(outputShp + '.shx')
    os.remove(outputShp + '.shp')
    os.remove(outputShp + '.dbf')

    return FileResponse(outputShp + '.zip', request=request,
                        content_type='application/zip')
def get_polygon(self):
    """Return the coordinates of the stored GeoJSON if it is a Polygon."""
    if not self.geojson:
        return None
    parsed = geojson.loads(self.geojson)
    # Any non-Polygon geometry yields None.
    return parsed['coordinates'] if parsed['type'] == 'Polygon' else None
def get_point(self):
    """Return (lon, lat) floats from the stored GeoJSON if it is a Point."""
    if not self.geojson:
        return None
    parsed = geojson.loads(self.geojson)
    if parsed['type'] != 'Point':
        return None
    coords = parsed['coordinates']
    return (float(coords[0]), float(coords[1]))
def get_coordinates(geojson_file, feature_number=0):
    """Return the coordinates of a polygon of a GeoJSON file.

    :param geojson_file: path to a GeoJSON file containing features.
    :param feature_number: index of the feature whose polygon is wanted.
    :return: comma-separated "lon lat" pairs of the polygon's outer ring.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original leaked the handle returned by open()).
    with open(geojson_file, 'r') as f:
        geojson_obj = geojson.loads(f.read())
    coordinates = geojson_obj['features'][feature_number]['geometry']['coordinates'][0]
    # precision of 7 decimals equals 1mm at the equator
    coordinates = ['%.7f %.7f' % tuple(coord) for coord in coordinates]
    return ','.join(coordinates)
def create(self, request):
    """
    Read the GeoJSON feature collection from the request body and create
    new objects in the database.
    """
    if self.readonly:
        return HTTPMethodNotAllowed(headers={'Allow': 'GET, HEAD'})
    collection = loads(request.body, object_hook=GeoJSON.to_instance)
    if not isinstance(collection, FeatureCollection):
        return HTTPBadRequest()

    session = self.Session()
    objects = []
    for feature in collection.features:
        is_new = False
        # Look up an existing mapped object when the feature carries an id.
        obj = None
        if hasattr(feature, 'id') and feature.id is not None:
            obj = session.query(self.mapped_class).get(feature.id)
        # Hook point: callers may veto or mutate before creation/update.
        if self.before_create is not None:
            self.before_create(request, feature, obj)
        if obj is None:
            obj = self.mapped_class(feature)
            is_new = True
        else:
            obj.__update__(feature)
        if is_new:
            session.add(obj)
        objects.append(obj)
    session.flush()

    request.response.status_int = 201
    return FeatureCollection(objects) if len(objects) > 0 else None
def import_from_geojson(self, input):
    """Create project tasks from the polygons of a GeoJSON string.

    :param input: GeoJSON text containing a feature collection.
    :return: number of tasks created.
    :raises ValueError: when the collection has no features or no polygons.
    """
    collection = geojson.loads(input,
                               object_hook=geojson.GeoJSON.to_instance)
    if not hasattr(collection, "features") or \
       len(collection.features) < 1:
        raise ValueError("GeoJSON file doesn't contain any feature.")

    tasks = []
    for feature in collection.features:
        geometry = shapely.geometry.asShape(feature.geometry)
        # Only polygon features become tasks; skip everything else.
        # (The original guarded this with a redundant `elif not isinstance`
        # branch plus a separate hasPolygon flag.)
        if not isinstance(geometry, shapely.geometry.Polygon):
            continue
        tasks.append(Task(None, None, None, 'SRID=4326;%s' % geometry.wkt))

    # tasks is non-empty iff at least one polygon was seen.
    if not tasks:
        raise ValueError("GeoJSON file doesn't contain any polygon.")

    self.tasks = tasks
    bounds = DBSession.query(ST_Convexhull(ST_Collect(Task.geometry))) \
                      .filter(Task.project_id == self.id).one()
    self.area = Area(bounds[0])
    return len(tasks)
def parse_geometry(geometry_raw, rewrite_circle=False):
    """Parse a WKT, GeoJSON, or (optionally) CIRCLE(...) geometry string into
    a SQLAlchemy geometry expression.

    :param geometry_raw: geometry text in WKT, GeoJSON, or CIRCLE form.
    :param rewrite_circle: when True, accept CIRCLE(lng lat, radius_m) and
        rewrite it as a buffered point.
    :return: a SQLAlchemy function expression, or None when unparseable.
    """
    geometry_statement = None
    # is it WKT?
    try:
        geometry_statement = sqlalchemy.sql.expression.func.GeomFromText(
            geomet.wkt.dumps(geomet.wkt.loads(geometry_raw)))
    except ValueError:
        logging.debug(' ... not WKT')
    # is it GeoJSON?  Compare against None explicitly: SQLAlchemy expression
    # objects do not support plain boolean truth testing.
    if geometry_statement is None:
        try:
            geometry_statement = sqlalchemy.sql.expression.func.GeomFromText(
                geomet.wkt.dumps(geojson.loads(geometry_raw)))
        except ValueError:
            logging.debug(' ... not GeoJSON')
    if geometry_statement is None and rewrite_circle and 'CIRCLE' in geometry_raw:
        # now see if it a CIRCLE(long lat, rad_in_m)
        re_res = re.findall(
            r'CIRCLE\s*\(\s*([0-9.-]+)\s+([0-9.-]+)\s*,\s*([0-9.]+)\s*\)',
            geometry_raw)
        if re_res and len(re_res[0]) == 3:
            lng = float(re_res[0][0])
            lat = float(re_res[0][1])
            rad = float(re_res[0][2])
            # Convert radius from meters to approximate degrees.
            geometry_statement = sqlalchemy.sql.expression.func.Buffer(
                sqlalchemy.sql.expression.func.POINT(lng, lat),
                rad / 1000 / 111.045)
        else:
            # logging.warn is a deprecated alias of logging.warning.
            logging.warning('ignoring malformed intersects statement:%s',
                            geometry_raw)
    logging.info('%s becomes %s', geometry_raw, geometry_statement)
    return geometry_statement
def plot_save(): with open('data.json', 'r') as f: load = geojson.loads(f.read()) features = load.features data = pandas.DataFrame() data['x'] = [x['geometry']['coordinates'][0] for x in features] data['y'] = [x['geometry']['coordinates'][1] for x in features] data['value'] = [(x['properties']['re']+x['properties']['rei']) for x in features] data['trees'] = [math.ceil(math.log1p(x['properties']['trs'])*1.17)+1 for x in features] data = data[data['value'] < data['value'].quantile(.99)] print data.trees.describe() print data.value.quantile(.1), data.value.quantile(.9) print data.value.describe() plt.figure(figsize=(20, 15)) #plt.xlabel('Leafiness Index') #plt.ylabel('Real Estate Value') #plt.scatter(data.x, data.y, c=data.trees, s=4, alpha=0.1) #plt.ylim([0, 3000000]) #plt.boxplot([data[data.trees==x].value for x in range(1, 11)]) plt.hist(data.trees, bins=10) #data.to_csv("data.csv", index=False) return
def read_report_dir(rptdir, total_parcel_count=0):
    """Build a FloodReport from a directory of preprocessed report data.

    :param rptdir: directory holding the preprocessed report files.
    :param total_parcel_count: total parcels in the study area.
    :return: populated FloodReport instance.
    """
    rpt = FloodReport()
    rpt.total_parcel_count = total_parcel_count
    rpt.model = swmmio.Model(os.path.dirname(rptdir))
    rpt.scenario = rpt.model.scenario
    rpt.parcel_flooding = pd.read_csv(
        os.path.join(rptdir, 'parcel_flood_comparison.csv'))
    rpt.parcel_hrs_flooded = rpt.parcel_flooding.HoursFloodedProposed.sum()
    rpt.parcel_vol_flooded = rpt.parcel_flooding.TotalFloodVolProposed.sum()

    costcsv = os.path.join(rptdir, 'cost_estimate.csv')
    if os.path.exists(costcsv):
        # Cost estimate total, reported in millions.
        cost_df = pd.read_csv(costcsv)
        rpt.cost_estimate = cost_df.TotalCostEstimate.sum() / math.pow(10, 6)

    conduits_geojson_path = os.path.join(rptdir, 'new_conduits.json')
    if os.path.exists(conduits_geojson_path):
        with open(conduits_geojson_path, 'r') as f:
            rpt.new_conduits_geojson = geojson.loads(f.read())

    return rpt
def search_source_locations(
    vial_http: urllib3.connectionpool.ConnectionPool,
    **kwds: Any,
) -> Iterator[dict]:
    """Wrapper around search source locations api. Returns geojson."""
    # Force newline-delimited geojson so the response can be streamed.
    query = urllib.parse.urlencode({**kwds, "format": "nlgeojson"})
    resp = vial_http.request(
        "GET", f"/api/searchSourceLocations?{query}", preload_content=False)
    for line in resp:
        try:
            yield geojson.loads(line)
        except json.JSONDecodeError:
            logger.warning("Invalid json record in search response: %s", line)
    resp.release_conn()
def search_projects(search_dto: ProjectSearchDTO, user) -> ProjectSearchResultsDTO:
    """ Searches all projects for matches to the criteria provided by the user """
    all_results, paginated_results = ProjectSearchService._filter_projects(
        search_dto, user)
    if paginated_results.total == 0:
        raise NotFound()

    dto = ProjectSearchResultsDTO()
    dto.results = [
        ProjectSearchService.create_result_dto(
            p,
            search_dto.preferred_locale,
            Project.get_project_total_contributions(p[0]),
        )
        for p in paginated_results.items
    ]
    dto.pagination = Pagination(paginated_results)
    if search_dto.omit_map_results:
        return dto

    # Build a feature collection so all matching projects can be shown on the map.
    features = [
        geojson.Feature(
            geometry=geojson.loads(project.centroid),
            properties={
                "projectId": project.id,
                "priority": ProjectPriority(project.priority).name,
            },
        )
        for project in all_results
    ]
    dto.map_results = geojson.FeatureCollection(features)
    return dto
def json2geojson(data):
    """Convert a milieuzones JSON payload to a GeoJSON FeatureCollection dict.

    :param data: dict with a "milieuzones" list; each entry may hold a
        "milieuzone" record whose "geo" member is GeoJSON text.
    :return: {"type": "FeatureCollection", "features": [...]} where every
        feature geometry is a MultiPolygon.
    :raises Exception: when a record's geometry contains no (multi)polygon.
    """
    features = []
    for element in data["milieuzones"]:
        if "milieuzone" not in element:
            continue
        milieuzone = element["milieuzone"]
        geo = loads(milieuzone["geo"])

        # Normalize whatever geometry was stored into a MultiPolygon.
        multipolygon = None
        if type(geo) is Polygon:
            # BUG FIX: was MultiPolygon(f) -- `f` is undefined here and
            # raised NameError for every plain-Polygon record.
            multipolygon = MultiPolygon(geo)
        elif type(geo) is MultiPolygon:
            multipolygon = geo
        elif type(geo) is GeometryCollection:
            # Use the first polygonal member of the collection.
            for member in geo["geometries"]:
                if type(member) is Polygon:
                    multipolygon = MultiPolygon(member)
                    break
                elif type(member) is MultiPolygon:
                    multipolygon = member
                    break
        if multipolygon is None:
            raise Exception("Missing (multi)polygon")

        properties = {
            key: milieuzone[key]
            for key in ("id", "verkeerstype", "vanafdatum")
        }
        features.append({
            "type": "Feature",
            "geometry": multipolygon,
            "properties": properties,
        })
    return {"type": "FeatureCollection", "features": features}
def search_projects(
        search_dto: ProjectSearchDTO) -> ProjectSearchResultsDTO:
    """ Searches all projects for matches to the criteria provided by the user """
    filtered_projects = ProjectSearchService._filter_projects(search_dto)
    if filtered_projects.total == 0:
        raise NotFound()

    dto = ProjectSearchResultsDTO()
    for project in filtered_projects.items:
        # TODO would be nice to get this for an array rather than
        # individually would be more efficient
        project_info_dto = ProjectInfo.get_dto_for_locale(
            project.id, search_dto.preferred_locale, project.default_locale)

        result = ProjectSearchResultDTO()
        result.project_id = project.id
        result.locale = project_info_dto.locale
        result.name = project_info_dto.name
        result.priority = ProjectPriority(project.priority).name
        result.mapper_level = MappingLevel(project.mapper_level).name
        result.short_description = project_info_dto.short_description
        result.aoi_centroid = geojson.loads(project.centroid)
        result.organisation_tag = project.organisation_tag
        result.campaign_tag = project.campaign_tag
        # Percent mapped excludes bad-imagery tasks from the denominator.
        result.percent_mapped = round(
            (project.tasks_mapped /
             (project.total_tasks - project.tasks_bad_imagery)) * 100, 0)
        # Bad-imagery tasks count as validated.
        result.percent_validated = round(
            ((project.tasks_validated + project.tasks_bad_imagery) /
             project.total_tasks) * 100, 0)
        dto.results.append(result)

    dto.pagination = Pagination(filtered_projects)
    return dto
def area_distance(geoalchemy_polygon: WKBElement,
                  geocode_geo_json=None) -> Dict[str, int]:
    """calculates area of polygon wkbelement and optionally distance to a
    geojson gemetry if it is provided.

    Args:
        geoalchemy_polygon (WKBElement): polygon whose area is desired
        geocode_geo_json (geojson geometry): geometry to calculate distance to

    Returns:
        Dict[str, int]: {'area': area in sqm, 'distance': distance in meters}
        if no input geojson geometry for distance calculation then returns -1
        for distance
    """
    polygon_shape = geoalchemy2.shape.to_shape(geoalchemy_polygon)

    # Project WGS84 lat/lon into web mercator so area/distance come out in
    # meters.
    to_mercator = pyproj.Transformer.from_proj(
        pyproj.Proj(init='epsg:4326'),  # source
        pyproj.Proj(init='epsg:3857'))  # destination

    polygon_projected = transform(to_mercator.transform, polygon_shape)
    area = round(polygon_projected.area)

    if not geocode_geo_json:
        return {'area': area, 'distance': -1}

    # Round-trip through JSON to obtain a geojson object, then project it
    # and measure the distance to the polygon centroid.
    geocode_obj = geojson.loads(json.dumps(geocode_geo_json))
    geocode_projected = transform(to_mercator.transform,
                                  geometry.shape(geocode_obj))
    distance = round(geocode_projected.distance(polygon_projected.centroid))
    return {'area': area, 'distance': distance}
def get(self, request, **kwargs):
    """
    Returns geojson if requested, otherwise handles request as normal.
    """
    if self.request.query_params.get('geojson') != 'true':
        return super().get(request)

    # GeoJSON requested: have the database render each well's geometry as
    # GeoJSON text so a FeatureCollection can be assembled here in Python.
    # This might be more performant in the database using json_agg and
    # ST_AsGeoJSON vs creating geojson Features here in Python.
    locations = self.filter_queryset(self.get_queryset())

    # return an empty response if there are too many wells to display
    if locations.count() > self.MAX_LOCATION_COUNT:
        raise PermissionDenied(
            'Too many wells to display on map. '
            'Please zoom in or change your search criteria.')

    locations = locations.annotate(
        geometry=Cast(Func('geom', function='ST_AsGeoJSON'),
                      output_field=TextField())
    ).values("well_tag_number", "identification_plate_number", "geometry",
             "street_address", "city")

    features = [
        Feature(geometry=geojson.loads(record.pop('geometry')),
                properties=dict(record))
        for record in locations
    ]
    return HttpResponse(geojson.dumps(FeatureCollection(features)))
def read_json_string_to_df(json_string: str) -> GeoDataFrame:
    """Parse a (Geo)JSON string into a GeoDataFrame.

    Accepts a FeatureCollection, a Feature, a bare geometry, a bbox
    [minx, miny, maxx, maxy], or raw polygon coordinates.  Single quotes
    are tolerated and rewritten to valid JSON.

    :param json_string: the JSON text entered by the user.
    :return: GeoDataFrame in EPSG:4326.
    """
    geom_json = geojson.loads(json_string.replace("'", '"'))
    fc = None
    if isinstance(geom_json, dict):
        if geom_json["type"] == "FeatureCollection":
            fc = geom_json
        elif geom_json["type"] == "Feature":
            fc = FeatureCollection(features=[geom_json])
        elif geom_json["type"] in [
            "Polygon",
            "MultiPolygon",
            "Point",
            "MultiPoint",
            "LineString",
            "MultiLineString",
            "LinearRing",
        ]:
            fc = FeatureCollection([Feature(geometry=geom_json)])
    elif isinstance(geom_json, list):
        if len(geom_json) == 4 and isinstance(geom_json[0], float):
            # bbox -> Polygon
            geometry = mapping(box(*geom_json))
            fc = FeatureCollection([Feature(geometry=geometry)])
        elif isinstance(geom_json[0], list) and isinstance(geom_json[0][0], list):
            # nested coordinate arrays -> Polygon
            geometry = {"type": "Polygon", "coordinates": geom_json}
            fc = FeatureCollection([Feature(geometry=geometry)])
    if fc is None:
        # BUG FIX: previously only the list branch reported errors; a dict
        # with an unsupported "type" fell through and raised NameError on fc.
        st.error(
            "Could not read json string! Check missing brackets! Only FeatureCollection, "
            "Feature, Geometry, Coordinates, or bbox are allowed!"
        )
        st.stop()
    df = gpd.GeoDataFrame.from_features(fc, crs="EPSG:4326")
    return df
def save_supplement(public_id, df, output_dir, category, name, props=None):
    '''Save a geopandas dataframe to geojson file.
    '''
    logger = logging.getLogger(__name__)

    event_dir = event_dir_from_publicid(public_id)
    supp_map = get_supplement_map()
    if category not in supp_map.keys():
        logger.error(
            'The category %s was not found in the available categories.',
            category)
        return

    supplement_sub_dir = supp_map[category][name]['subdir']
    supplement_name = supp_map[category][name]['name']
    output_dir = os.path.join(output_dir, event_dir, supplement_sub_dir)

    fc = geojson.loads(df.to_json())
    # Add foreign members to the feature collection.
    fc.name = supplement_name
    fc.properties = {
        'public_id': public_id,
        'computation_time': isoformat_tz(obspy.UTCDateTime()),
    }
    if props is not None:
        fc.properties.update(props)

    return write_geojson_file(fc,
                              category=category,
                              name=supplement_name,
                              prefix=public_id,
                              output_dir=output_dir)
def validate_geojson_point(obj):
    """
    Validate that input object is a valid json and has valid GeoJSON format
    for a "Point".

    Required Arg (json|dict): GeoJSON Point object (we can handle json-str or dict)

    Raises: InvalidGeoJson() if object does not meet criteria for valid
    GeoJSON Point

    Returns: dict representation of GeoJSON Point
    """
    # basic json validation -- exceptions propagate to the caller unchanged
    # (the original wrapped this in a try/except that only re-raised).
    d_point = basic_json_validation(obj)

    ## GeoJSON Point validation: round-trip through geojson to normalize.
    point = geojson.dumps(d_point)
    dprint(1, point)
    json_point = geojson.loads(point)
    dprint(1, json_point)
    try:
        point = geojson.Point(json_point)
    except ValueError as e:
        raise InvalidGeoJson("Invalid GeoJSON Point: %s" % e)
    dprint(1, point)
    valid = point.is_valid
    dprint(1, "VALID=%s" % valid)
    if not valid:
        l_errors = point.errors()
        dprint(1, "ERRORS:%s" % l_errors)
        raise InvalidGeoJson("Invalid GeoJSON Point: %s" % str(l_errors))
    return (d_point)
def cache_images():
    """Cache med/large school photos and record their URLs on each feature."""
    combined = geojson.loads(readfile('nairobi-combined-schools.geojson'))
    image_props = ("osm:image:classroom", "osm:image:compound",
                   "osm:image:other", "osm:image:outside")
    for index, feature in enumerate(combined.features):
        med_images = []
        large_images = []
        for prop in image_props:
            if prop not in feature['properties']:
                continue
            osm_id = feature['properties']['osm:id']
            url = feature['properties'][prop]
            med_images.append(get_image_cache(osm_id, prop, url, 'med'))
            large_images.append(get_image_cache(osm_id, prop, url, 'large'))
        if len(med_images) > 0:
            combined.features[index]['properties']['osm:images'] = \
                ','.join(med_images)
            combined.features[index]['properties']['osm:large_images'] = \
                ','.join(large_images)
    dump = geojson.dumps(combined, sort_keys=True, indent=2)
    writefile('nairobi-combined-schools.geojson', dump)
def GetLipasData(typecode, typename): """ This function fetches LIPAS data from WFS and sets its crs. LIPAS data is returned to the user with simplified attributes; id, name of the sport place in Finnish and Swedish, type code and type name in Finnish. Arguments: First argument is 4 digit typecode of the sport facility and second is the typename of the sport facility in Finnish. Exhaustive list of these can be found from data/Codes_LIPAS.csv. """ # Fetching data from WFS using requests, in json format, using bounding box over the helsinki area r = requests.get( """http://lipas.cc.jyu.fi/geoserver/lipas/ows?service=wfs&version=2.0.0&request=GetFeature&typeNames=lipas:lipas_""" + typecode + """_""" + typename + """&bbox=361500.0001438780454919,6665250.0001345984637737,403750.0001343561452813,6698000.0001281434670091,EPSG:3067&outputFormat=json""" ) # Creating GeoDataFrame from geojson lipas_data = gpd.GeoDataFrame.from_features(geojson.loads(r.content)) # Removing unnecessary attributes from lipas_data lipas_data = lipas_data[[ "geometry", "id", "nimi_fi", "nimi_se", "tyyppikoodi", "tyyppi_nimi_fi" ]] # Define crs for lipas_data lipas_data.crs = {'init': 'epsg:3067'} return lipas_data
def getmapminerfeatures(request):
    """
    End-point to retrieve geographical features from regions of interest.

    Parameters
    ----------
    request : HttpRequest
        An HTTP 'POST' request with:
        - regions of interest (GeoJSON)
        - Map miner Id (str)
        - Feature Name (str)
        E.g.:
        {
            'mapMinerId': 'osm',
            'featureName': 'streets'
            'regions': <GeoJSON>
        }

    Returns
    -------
    JsonResponse: GeoJSON
        The response will be a GeoJSON object with the requested feature
        collected, from the Map Miner selected, inside the region(s) of
        interest.
    """
    jsondata = request.data
    mapMinerId = jsondata["mapMinerId"]  # which Map Miner backend to query
    query = jsondata["featureName"]  # feature to collect (e.g. 'streets')
    # Regions arrive as a GeoJSON string; parse into an object.
    region = geojson.loads(jsondata["regions"])
    ret = mapMinerManager.requestQueryToMapMiner(mapMinerId, query, region)
    # CustomJSONEncoder is passed positionally as JsonResponse's encoder.
    return JsonResponse(ret, CustomJSONEncoder)
def _buffer(json_geometry: str, distance: int) -> WKBElement:
    """assumes source crs is 4326 and projected crs to use is 3857"""
    # GeoJSON string -> shapely geometry in WGS84.
    parsed = geojson.loads(json_geometry)
    geom_wgs84 = geometry.shape(parsed)

    # Project into Web Mercator so the buffer distance is in metres.
    to_mercator = pyproj.Transformer.from_proj(
        pyproj.Proj(init='epsg:4326'),  # source
        pyproj.Proj(init='epsg:3857'))  # destination
    geom_mercator = transform(to_mercator.transform, geom_wgs84)

    buffered_mercator = geom_mercator.buffer(distance)

    # Project the buffered geometry back to WGS84.
    to_wgs84 = pyproj.Transformer.from_proj(
        pyproj.Proj(init='epsg:3857'),  # source
        pyproj.Proj(init='epsg:4326'))  # destination
    buffered_wgs84 = transform(to_wgs84.transform, buffered_mercator)

    # Hand back a geoalchemy element for DB storage.
    return geoalchemy2.shape.from_shape(buffered_wgs84)
def execute(trial=False):
    '''Retrieve some data sets (not using the API here for the sake of simplicity).'''
    startTime = datetime.datetime.now()
    # Set up the database connection.
    client = dml.pymongo.MongoClient()
    repo = client.repo
    repo.authenticate('colinstu', 'colinstu')
    # Carto SQL API query (URL-encoded SELECT joining Section 8 income limits
    # with municipality geometries for FY2017), returned as GeoJSON.
    url = 'https://mapc-admin.carto.com/api/v2/sql?q=SELECT%20a.seq_id%2Ca.muni_id%2Ca.municipal%2Ca.countyname%2Ca.areaname%2Ca.fy_year%2Ca.median%2Ca.il_50_1%2Ca.il_50_2%2Ca.il_50_3%2Ca.il_50_4%2Ca.il_50_5%2Ca.il_50_6%2Ca.il_50_7%2Ca.il_50_8%2Ca.il_30_1%2Ca.il_30_2%2Ca.il_30_3%2Ca.il_30_4%2Ca.il_30_5%2Ca.il_30_6%2Ca.il_30_7%2Ca.il_30_8%2Ca.il_80_1%2Ca.il_80_2%2Ca.il_80_3%2Ca.il_80_4%2Ca.il_80_5%2Ca.il_80_6%2Ca.il_80_7%2Ca.il_80_8%2C%20b.the_geom%2C%20b.the_geom_webmercator%20%20FROM%20hous_section8_income_limits_by_year_m%20a%20%20INNER%20JOIN%20ma_municipalities%20b%20ON%20a.muni_id%20%3D%20b.muni_id%20WHERE%20a.fy_year%20IN%20(%272017%27)&format=geojson&filename=hous_section8_income_limits_by_year_m'
    response = urllib.request.urlopen(url).read().decode("utf-8")
    r = geojson.loads(response)
    # NOTE(review): 's' is computed but never used afterwards.
    s = geojson.dumps(r, sort_keys=True, indent=2)
    # Recreate the target collection and insert one document per feature.
    repo.dropCollection("HUDincome")
    repo.createCollection("HUDincome")
    repo['colinstu.HUDincome'].insert_many(r['features'])
    repo['colinstu.HUDincome'].metadata({'complete': True})
    print(repo['colinstu.HUDincome'].metadata())
    repo.logout()
    endTime = datetime.datetime.now()
    return {"start": startTime, "end": endTime}
def export_geojson(self, file, **kwargs):
    """Export features from a GeoJSON file via self.export_feature.

    :param file: Path to the GeoJSON file. If kwargs['line_delimited'] is
        truthy the file is treated as line-delimited GeoJSON (one feature
        per line); otherwise it is parsed as a single FeatureCollection.
    :param kwargs: Passed through to export_feature in the
        FeatureCollection branch.
    :return: False on parse failure or when the collection has no
        features; otherwise None.
    """
    path = os.path.abspath(file)
    # BUG FIX: the original opened the file without ever closing it; a
    # context manager guarantees the handle is released.
    with open(path, 'r') as fh:
        if kwargs.get('line_delimited', False):
            # One GeoJSON feature per line; skip lines that fail to parse.
            for ln in fh.readlines():
                data = geojson.loads(ln)
                if not data:
                    logging.warning("failed to parse line")
                    logging.debug(ln)
                    continue
                self.export_feature(data)
        else:
            try:
                data = geojson.load(fh)
            except Exception as e:
                # BUG FIXES: 'except Exception, e' was Python-2-only syntax,
                # and the format string had one %s for two arguments, which
                # raised TypeError instead of logging.
                logging.error("Failed to load JSON for %s, because %s" % (path, e))
                return False
            features = data.get('features', [])
            if len(features) == 0:
                logging.warning("%s has no features" % path)
                return False
            for f in features:
                self.export_feature(f, **kwargs)
def subset_s2(path, file, aoi):
    """Resample a Sentinel-2 product to 10 m and subset it to an AOI polygon.

    :param path: directory containing the input product (also output target)
    :param file: product file name to read
    :param aoi: path to a GeoJSON file holding the AOI polygon feature(s)

    Writes '<path><product name>_subset_' in BEAM-DIMAP format.
    """
    # Read Data________________________________________________________________
    print("SUBSET: Read Product...")
    sentinel = ProductIO.readProduct(path+file)
    print("SUBSET: Done reading!")
    # Get Band Names and print info
    name = sentinel.getName()
    print("SUBSET: Image ID: %s" % name)
    band_names = sentinel.getBandNames()
    print("SUBSET: Bands: %s" % (list(band_names)))
    # Preprocessing ___________________________________________________________
    # Resampling all bands to a common 10 m grid via the SNAP 'Resample' op.
    parameters = HashMap()
    parameters.put('targetResolution', 10)
    print("SUBSET: resample target resolution: 10m")
    product_resample = snappy.GPF.createProduct('Resample', parameters, sentinel)
    # Geojson to wkt
    # NOTE(review): only the LAST feature's polygon survives this loop —
    # confirm single-feature AOI files are the intended input.
    with open(aoi) as f:
        gjson = json.load(f)
    for feature in gjson['features']:
        polygon = (feature['geometry'])
        str_poly = json.dumps(polygon)
        gjson_poly = geojson.loads(str_poly)
        poly_shp = shape(gjson_poly)
        wkt = poly_shp.wkt
    # Subset the resampled product to the AOI geometry.
    geom = WKTReader().read(wkt)
    op = SubsetOp()
    op.setSourceProduct(product_resample)
    op.setGeoRegion(geom)
    product_sub = op.getTargetProduct()
    # Write Data_______________________________________________________
    print("SUBSET: Writing subset.")
    subset = path + name + '_subset_'
    ProductIO.writeProduct(product_sub, subset, "BEAM-DIMAP")
    print("SUBSET: Done and saved in %s" % path)
def clean_osm(file):
    """Normalize an OSM GeoJSON export in place on disk.

    Flattens each feature's 'tags' into 'osm:'-prefixed properties, adds
    editor metadata (user, date, id, location derived from the filename),
    drops 'osm:polling_station:*' properties, and rewrites the file
    (sorted keys, indented).

    :param file: path to the GeoJSON file to clean and overwrite.
    """
    osm = geojson.loads(readfile(file))
    for feature in osm.features:
        properties = {}
        # Flatten raw OSM tags into namespaced properties.
        for osm_property in feature.properties['tags'].keys():
            properties["osm:" + osm_property] = feature.properties['tags'][osm_property]
        properties["osm:_user"] = feature.properties['meta']['user']
        properties["osm:_timestamp"] = datetime.strptime(
            feature.properties['meta']['timestamp'],
            '%Y-%m-%dT%H:%M:%SZ').strftime('%Y-%m-%d')
        properties["osm:id"] = feature['id']  # TODO change to "_id"?
        # Location prefix comes from the filename, e.g. 'kibera-....geojson'.
        properties["osm:location"] = os.path.splitext(
            os.path.basename(file))[0].split('-')[0]
        feature.properties = properties
        # BUG FIX: iterate over a snapshot of the keys. Popping entries while
        # iterating the live .keys() view raises RuntimeError on Python 3.
        for prop in list(feature.properties.keys()):
            if prop.startswith('osm:polling_station:'):
                feature.properties.pop(prop, None)
    dump = geojson.dumps(osm, sort_keys=True, indent=2)
    writefile(file, dump)
def _get_details_at_map(self, place):
    """Scrape a place's detail map page and enrich *place* in-place.

    Extracts Leaflet marker definitions (title, image, GPS coordinates,
    accessibility) and an optional GeoJSON overlay from the page's inline
    map script.

    :param place: dict with at least a 'map' URL; mutated and returned.
    """
    soup = self._get_soup(place['map'])
    # The marker data lives in an inline <script> containing 'var points'.
    script = soup.find(string=re.compile('var points'))
    pattern = r"""L.marker\(pos,\{title : "(.*)", icon : stateIcons\[(\d)\]\}\).addTo\(map\).bindPopup\('<h2>.*</h2><img src="(.*)" height="100" /><p>GPS: (.*), (.*)</p>'\);"""
    matches = re.findall(pattern, script)
    markers = []
    for match in matches:
        # Match groups: (title, state-icon index, image path, lat, lon).
        marker = {'title': match[0],
                  'image': 'https://www.kafelanka.cz' + match[2],
                  'latitude': float(match[3]),
                  'longitude': float(match[4]),
                  # stateIcons appears to be 1-based in the page markup.
                  'accessibility': self.accessibilities[int(match[1]) - 1]}
        markers.append(marker)
    # NOTE(review): raises IndexError if no marker matched — confirm every
    # map page is guaranteed to define at least one marker.
    place['image'] = markers[0]['image']
    place['latitude'] = markers[0]['latitude']
    place['longitude'] = markers[0]['longitude']
    place['accessibility'] = markers[0]['accessibility']
    place['markers'] = markers
    # Optional GeoJSON overlay added to the map via L.geoJSON(...).
    pattern = r'L.geoJSON\(([\s\S]*)\).addTo\(map\);'
    match = re.search(pattern, script)
    if match:
        feature_collection = geojson.loads(match.group(1))
        place['features'] = feature_collection['features']
    return place
def get_masked_latlongs(band_tif_file: str, geo_json: json, no_data_value: int) -> array:
    '''
    Returns the latitudes and longitudes of the pixel (inside geo_json) centroids

    :param str band_tif_file: tiff file path
    :param json geo_json: geo json of the area of interest
    :param str no_data_value: no data value
    :return: tuple (extent_lng_lat, farm_lng_lat): centroids of ALL pixels in
        the cropped extent, and of only the pixels inside the geo_json mask.
        NOTE(review): the annotated return type 'array' does not reflect the
        2-tuple actually returned — confirm callers unpack a tuple.
    '''
    with rasterio.open(band_tif_file) as src:
        shape_jsons = []
        shape_jsons.append(geojson.loads(geo_json))
        # Crop to the AOI; pixels outside the shapes become no_data_value.
        masked_band, out_transform = rasterio.mask.mask(dataset=src, shapes=shape_jsons, crop=True, nodata=no_data_value)
        # All rows and columns
        cols, rows = np.meshgrid(np.arange(masked_band.shape[2]), np.arange(masked_band.shape[1]))
        # Flatten all rows, cols, masked_band
        rows = rows.flatten()
        cols = cols.flatten()
        masked_band = masked_band.flatten()
        # Add half the cell size to get pixel centroid(latitude and longitude)
        (upper_left_x, x_size, x_rotation, upper_left_y, y_rotation, y_size) = out_transform.to_gdal()
        lng = cols * x_size + upper_left_x + (x_size / 2)
        lat = rows * y_size + upper_left_y + (y_size / 2)
        extent_lng_lat = np.column_stack([lng, lat])
        # Keep only pixels that survived the mask.
        farm_lng_lat = extent_lng_lat[masked_band != no_data_value]
        return (extent_lng_lat, farm_lng_lat)
async def _fetch(self, method: str = "GET", headers=None, params=None) -> Tuple[str, Optional[FeatureCollection]]:
    """Fetch GeoJSON data from external source.

    Returns (UPDATE_OK, feature_collection) on success, or
    (UPDATE_ERROR, None) on HTTP/client errors, timeouts, or JSON parse
    failures — all failures are logged rather than raised.
    """
    try:
        timeout = aiohttp.ClientTimeout(total=self._client_session_timeout())
        async with self._websession.request(method, self._url, headers=headers, params=params, timeout=timeout) as response:
            try:
                # Turns HTTP error status codes into ClientError below.
                response.raise_for_status()
                text = await response.text()
                # Parse the body as GeoJSON (FeatureCollection expected).
                feature_collection = geojson.loads(text)
                return UPDATE_OK, feature_collection
            except client_exceptions.ClientError as client_error:
                _LOGGER.warning("Fetching data from %s failed with %s", self._url, client_error)
                return UPDATE_ERROR, None
            except JSONDecodeError as decode_ex:
                _LOGGER.warning("Unable to parse JSON from %s: %s", self._url, decode_ex)
                return UPDATE_ERROR, None
    except client_exceptions.ClientError as client_error:
        # Connection-level failures (DNS, refused, TLS, ...).
        _LOGGER.warning("Requesting data from %s failed with " "client error: %s", self._url, client_error)
        return UPDATE_ERROR, None
    except asyncio.TimeoutError:
        _LOGGER.warning("Requesting data from %s failed with " "timeout error", self._url)
        return UPDATE_ERROR, None
def POST(self):
    """Return ids and coordinates of properties within a given distance
    of a point.

    Expects a GeoJSON Point body with an extra 'x-distance' member
    (distance threshold for ST_Distance_Sphere); responds with a JSON
    list of {'propertyId', 'coordinates': [lon, lat]} objects.
    """
    # Read POST body as JSON (TODO: validate Content-Type is correct first)
    data = geojson.loads(web.data())
    # Since this endpoint returns application/json, let's set the right header
    web.header('Content-Type', 'application/json')
    web.header('Access-Control-Allow-Origin', '*')
    web.header('Access-Control-Allow-Methods', 'POST')
    # Fetch all properties within distance specified from point specified
    # (parameterized query — values are bound, not interpolated).
    query_db(
        """
        SELECT
            id,
            ST_X(geocode_geo::geometry) AS longitude,
            ST_Y(geocode_geo::geometry) AS latitude
        FROM properties
        WHERE ST_Distance_Sphere(geocode_geo::geometry, ST_MakePoint(%s, %s)) <= %s
        """,
        [data.geometry.coordinates[0], data.geometry.coordinates[1], data["x-distance"]])
    return json.dumps([{
        'propertyId': row[0],
        'coordinates': [row[1], row[2]]
    } for row in cur.fetchall()])
def download(request):
    """Query and download Sentinel-1 products for a posted AOI/date range,
    then notify the requesting user by e-mail.

    :param request: Django HttpRequest; the POST body is JSON with keys
        'geoJson', 'dateFrom' and 'dateTo'.
    :return: HttpResponse echoing the request body.
    """
    if request.method == 'POST':
        data = json.loads(request.body)
        footprint = geojson_to_wkt(geojson.loads(data['geoJson']))
        username = config.username  # ask ITC for the username and password
        password = config.password
        # fill with SMARTSeeds user and password
        api = SentinelAPI(username, password, "https://apihub.copernicus.eu/apihub/")
        # Build the '[from TO to]' date-range query string.
        tanggal = '[{0} TO {1}]'.format(data['dateFrom'].replace('.000Z', 'Z'), data['dateTo'].replace('.000Z', 'Z'))
        print(tanggal)
        products = api.query(footprint,
                             producttype=config.producttype,
                             orbitdirection=config.orbitdirection,
                             platformname='Sentinel-1',
                             date=tanggal)
        # Save the downloads into the 'sentineldata' folder.
        dirpath = os.getcwd() + '/sentineldata'
        for product in products:
            try:
                api.download(product, directory_path=dirpath, checksum=True)
            # BUG FIX: the original bare 'except:' also swallowed
            # KeyboardInterrupt/SystemExit; the per-product best-effort skip
            # is kept but narrowed to Exception.
            except Exception:
                continue
        # Drop partially-downloaded files.
        for item in os.listdir(dirpath):
            if item.endswith(".incomplete"):
                os.remove(os.path.join(dirpath, item))
        # E-mail notification (Django's send_mail).
        send_mail(
            'Your Download was successful!',  # subject
            'Terima kasih sudah menggunakan aplikasi webgis data yang anda unduh sudah masuk kedalam sistem website!',  # body (kept verbatim)
            settings.EMAIL_HOST_USER,  # sender
            [request.user.email],  # recipient
            fail_silently=False,
        )
    return HttpResponse(request.body)
def read_geojson(geojson_fn):
    """Read a GeoJSON file and return its polygons as shapely Polygons.

    Handles both Polygon and MultiPolygon features; any ring after the
    first in a geometry's coordinate list is treated as a hole.

    :param geojson_fn: path to the GeoJSON file (UTF-8).
    :return: list of shapely Polygon objects.
    """
    def _to_polygon(coords):
        # coords: [exterior_ring, hole_1, hole_2, ...]
        if len(coords) == 1:
            return Polygon(coords[0], holes=None)
        return Polygon(coords[0], holes=list(coords[1:]))

    with open(geojson_fn, encoding="utf-8") as src:
        gj = geojson.loads(src.read())
    polys = []
    for feature in gj.features:
        geom = feature['geometry']
        if geom['type'] == 'MultiPolygon':
            # One shapely Polygon per member polygon.
            for pol in geom['coordinates']:
                polys.append(_to_polygon(pol))
        else:
            polys.append(_to_polygon(geom['coordinates']))
    return polys
def polygon_add(request):
    """AJAX endpoint: persist a user-submitted polygon into the 'layers'
    database.

    Expects POST fields 'year', 'source', 'layer' and 'polygon' (a GeoJSON
    string). Returns {"success": "success"} on success, {} otherwise.
    """
    response = {}
    if request.is_ajax() and request.method == 'POST':
        info = request.POST
        year = info.get('year')
        source = info.get('source')
        layer = info.get('layer')
        polygon = info.get('polygon')
        # GeoJSON string -> geojson object -> shapely geometry (for WKT).
        polygon = geojson.loads(polygon)
        geom = shape(polygon)
        Session = GloVli.get_persistent_store_database('layers', as_sessionmaker=True)
        session = Session()
        # New polygons start unapproved.
        point_obj = Polygons(layer_name=layer, year=year, source=source, approved=False, geometry=geom.wkt)
        session.add(point_obj)
        session.commit()
        session.close()
        response = {"success": "success"}
    return JsonResponse(response)
def geojson_to_polygons(js_):
    """Convert the geojson into Shapely Polygons.

    Adapted from: https://gist.github.com/drmalex07/5a54fc4f1db06a66679e

    :param js_: geojson with segments as Polygons
    :return: list of Shapely Polygons of segments"""
    segment_polys = []
    for feat in js_['features']:
        # Rebuild a bare geometry dict for this feature.
        geom_dict = {
            "coordinates": feat['geometry']['coordinates'],
            "type": feat['geometry']['type']
        }
        # Round-trip through geojson, then hand off to shapely.
        geo_obj = geojson.loads(json.dumps(geom_dict))
        segment_polys.append(shape(geo_obj))
    return segment_polys
def execute(trial = False, log=False):
    '''Retrieve existing bike network data.'''
    startTime = datetime.datetime.now()
    # Set up the database connection.
    client = dml.pymongo.MongoClient()
    repo = client.repo
    repo.authenticate('bmroach', 'bmroach')
    # Do retrieving of data: recreate the target collection from scratch.
    repo.dropCollection("bike_network")
    repo.createCollection("bike_network")
    url = 'http://bostonopendata-boston.opendata.arcgis.com/datasets/d02c9d2003af455fbc37f550cc53d3a4_0.geojson'
    response = urllib.request.urlopen(url).read().decode("utf-8")
    gj = geojson.loads(response)
    # Insert one Mongo document per GeoJSON feature.
    geoDict = dict(gj)
    geoList = geoDict['features']
    repo['bmroach.bike_network'].insert_many( geoList )
    repo['bmroach.bike_network'].metadata({'complete':True})
    repo.logout()
    endTime = datetime.datetime.now()
    return {"start":startTime, "end":endTime}
def get_coordinates(geojson_file=None, tile=None, feature_number=0):
    """Return the coordinates of a polygon of a GeoJSON file.

    Parameters
    ----------
    geojson_file : str
        location of GeoJSON file_path
    tile : str
        Sentinel-2 tileID (can be given instead of geojson file). But if
        geojson_file is given, tile will be ignored.
    feature_number : int
        Feature to extract polygon from (in case of MultiPolygon
        FeatureCollection), defaults to first Feature

    Returns
    -------
    string of comma separated coordinate tuples (lon, lat) for polygons or
    a single (lat, long) for points (if tile is given) to be used by
    SentinelAPI
    """
    assert (geojson_file is not None) | (
        tile is not None), "Either geojson_file or tile must be provided."
    if geojson_file is not None:
        # BUG FIX: the original 'open(...).read()' leaked the file handle;
        # the context manager closes it deterministically.
        with open(geojson_file, 'r') as fp:
            geojson_obj = geojson.loads(fp.read())
        coordinates = geojson_obj['features'][feature_number]['geometry'][
            'coordinates'][0]
        # precision of 7 decimals equals 1mm at the equator
        coordinates = ['%.7f %.7f' % tuple(coord) for coord in coordinates]
    elif tile is not None:
        assert hasPandas, "pandas must be installed to use 'tile' option."
        # Look up the tile centroid from the bundled CSV.
        csv_file = "{0}/data/tile_centroids.csv".format(
            dirname(realpath(__file__)))
        tile_centroids = pd.read_csv(csv_file)
        tile_subset = tile_centroids[tile_centroids['tile'] == tile]
        coordinates = [float(tile_subset['lat']), float(tile_subset['lon'])]
        coordinates = ['%.7f' % coord for coord in coordinates]
    return ','.join(coordinates)
def get_masked_band(band_tif_file: str, geo_json: json, no_data_value: int) -> array:
    '''
    Masks the pixels outside the given boundary with no_data_value

    :param str band_tif_file: tiff file path
    :param json geo_json: geo json of the area of interest
    :param str no_data_value: no data value
    :return: returns masked band after masking given geo_json
    :raises Exception: if the tif file contains more than one band
    '''
    with rasterio.open(band_tif_file) as src:
        # Guard clause: this helper only supports single-band rasters.
        if src.count > 1:
            # BUG FIX: the original passed src.count as a second positional
            # argument (logging-style), so the '%d' was never substituted and
            # the exception carried an unformatted message tuple.
            raise Exception('Incorrect number of bands in tif file %d' % src.count)
        shape_jsons = [geojson.loads(geo_json)]
        # Crop to the AOI; pixels outside the shapes become no_data_value.
        masked_band, out_transform = rasterio.mask.mask(
            dataset=src, shapes=shape_jsons, crop=True, nodata=no_data_value)
    return masked_band
def make_geojson(request, entry):
    """Serialize a map entry to a single GeoJSON Feature with
    visibility-dependent properties.

    :param request: HttpRequest; decides whether author name/address are
        included (only for org leaders or the entry's author).
    :param entry: model instance with a 'census_blocks_polygon' geometry.
    :return: GeoJSON Feature dict.
    """
    map_geojson = serialize(
        "geojson",
        [entry],
        geometry_field="census_blocks_polygon",
        fields=(
            "entry_name",
            "cultural_interests",
            "economic_interests",
            "comm_activities",
            "other_considerations",
        ),
    )
    gj = geojson.loads(map_geojson)
    # Enforce right-hand-rule ring winding and drop the non-standard
    # 'crs' member emitted by the Django serializer.
    gj = rewind(gj)
    del gj["crs"]
    user_map = entry
    if user_map.organization:
        gj["features"][0]["properties"]["organization"] = user_map.organization.name
    if user_map.drive:
        gj["features"][0]["properties"]["drive"] = user_map.drive.name
    if request.user.is_authenticated:
        # Author details are only exposed to org leaders or the author.
        is_org_leader = user_map.organization and (
            request.user.is_org_admin(user_map.organization_id)
        )
        if is_org_leader or request.user == user_map.user:
            gj["features"][0]["properties"]["author_name"] = user_map.user_name
            # NOTE(review): if several addresses exist, each iteration
            # overwrites 'address' — only the last one is kept.
            for a in Address.objects.filter(entry=user_map):
                addy = (
                    a.street + " " + a.city + ", " + a.state + " " + a.zipcode
                )
                gj["features"][0]["properties"]["address"] = addy
    feature = gj["features"][0]
    return feature