def generator_function(f, verbose): counter = 0 for line in f: try: obj = geojson.loads(line) except: print "Unexpected error:", sys.exc_info() continue properties = property_map(obj.get('properties')) geometry = obj.get('geometry') geom_type = geometry.get('type') if geom_type == 'Polygon': poly = asShape(geometry) bounds = poly.bounds feature = geojson.Feature(id=counter, geometry=poly, properties=properties) print counter, bounds, properties.get('name') counter += 1 yield (counter, bounds, json.loads(geojson.dumps(feature))) elif geom_type == 'MultiPolygon': mpoly = asShape(geometry) for poly in mpoly: bounds = poly.bounds feature = geojson.Feature(id=counter, geometry=poly, properties=properties) print counter, bounds, properties.get('name') counter += 1 yield (counter, bounds, json.loads(geojson.dumps(feature))) else: print "unsupported type", geom_type continue
def symmetric_difference(shape, other):
    """Return the symmetric difference of two geo objects as a shape.

    Both arguments must expose ``__geo_interface__``; raises TypeError
    otherwise.
    """
    if not hasattr(shape, '__geo_interface__'):
        raise TypeError("%r does not appear to be a shape" % shape)
    if not hasattr(other, '__geo_interface__'):
        # bug fix: the message previously reported *shape* instead of *other*
        raise TypeError("%r does not appear to be a shape" % other)
    o = geom.asShape(shape)
    o2 = geom.asShape(other)
    res = o.symmetric_difference(o2)
    return asShape(res)
def shp_to_shply_multiply(shapefile_path): """ Convert Polygon Shapefile to Shapely MultiPolygon :param shapefile_path: path to a shapefile on disk :return: shapely MultiPolygon """ in_ply = shapefile.Reader(shapefile_path) # using pyshp reading geometry ply_shp = in_ply.shapes() if len(ply_shp) > 1: # using python list comprehension syntax # shapely asShape to convert to shapely geom ply_list = [asShape(feature) for feature in ply_shp] # create new shapely multipolygon out_multi_ply = MultiPolygon(ply_list) # # equivalent to the 2 lines above without using list comprehension # new_feature_list = [] # for feature in features: # temp = asShape(feature) # new_feature_list.append(temp) # out_multi_ply = MultiPolygon(new_feature_list) print "converting to MultiPolygon: " else: print "one or no features found" shply_ply = asShape(ply_shp) out_multi_ply = MultiPolygon(shply_ply) return out_multi_ply
def intersection(shape, other):
    """Return the intersection of two geo objects as a shape.

    Both arguments must expose ``__geo_interface__``; raises TypeError
    otherwise.
    """
    if not hasattr(shape, '__geo_interface__'):
        raise TypeError("%r does not appear to be a shape" % shape)
    if not hasattr(other, '__geo_interface__'):
        # bug fix: the message previously reported *shape* instead of *other*
        raise TypeError("%r does not appear to be a shape" % other)
    o = geom.asShape(shape)
    o2 = geom.asShape(other)
    res = o.intersection(o2)
    return asShape(res)
def merge(buildingIn, addressIn, mergedOut):
    # Attach address records to the building footprints that contain them
    # and write the combined building list to *mergedOut* as JSON.
    # Load all addresses as shapely shapes; the full source record is kept
    # on each shape via a dynamically attached .original attribute.
    addresses = []
    with collection(addressIn, "r") as input:
        for address in input:
            shape = asShape(address['geometry'])
            shape.original = address
            addresses.append(shape)
    # Load and index all buildings.
    buildings = []
    buildingShapes = []
    buildingIdx = index.Index()
    with collection(buildingIn, "r") as input:
        for building in input:
            shape = asShape(building['geometry'])
            building['properties']['addresses'] = []
            buildings.append(building)
            buildingShapes.append(shape)
            # rtree entry maps the bbox to the record's list position
            buildingIdx.add(len(buildings) - 1, shape.bounds)
    # Map addresses to buildings.
    for address in addresses:
        # cheap bbox pre-filter via the rtree, then exact containment test
        for i in buildingIdx.intersection(address.bounds):
            if buildingShapes[i].contains(address):
                buildings[i]['properties']['addresses'].append(
                    address.original)
    with open(mergedOut, 'w') as outFile:
        outFile.writelines(json.dumps(buildings, indent=4))
    print 'Exported ' + mergedOut
def reprojectPoint(obj, inProjection = 'epsg:3857', outProjection = 'epsg:4326'):
    """Reproject a point from *inProjection* to *outProjection*.

    *obj* may be a GeoJSON string, a GeoJSON dict (Feature or bare
    geometry), or an object with shapely-style ``.coords``.  The
    projections may be given as EPSG strings or prebuilt pyproj.Proj
    objects.  Returns a shapely Point.
    """
    # if our object is a raw string
    if isinstance(obj, str):
        obj = geojson.loads(obj)
    # if our object is a geojson dict.  bug fix: geojson.loads returns
    # dict *subclasses*, which "type(obj) == dict" rejected; isinstance
    # accepts them so parsed strings are handled correctly.
    if isinstance(obj, dict):
        # is our geojson a feature class?
        try:
            point = asShape(obj['geometry'])
        # or a regular geometry?
        except KeyError:
            point = asShape(obj)
    else:
        point = obj
    # pyproj transformation; accept ready-made Proj objects as-is
    if isinstance(inProjection, str):
        inP = pyproj.Proj(init=inProjection)
    else:
        inP = inProjection
    if isinstance(outProjection, str):
        outP = pyproj.Proj(init=outProjection)
    else:
        outP = outProjection
    newCoords = [pyproj.transform(inP, outP, i[0], i[1]) for i in list(point.coords)]
    return Point(newCoords)
def test_linestring_geojson(self):
    '''Create a line that goes from west to east (clip on)'''
    self.defineGeometry('LINESTRING')
    geom = LineString( [(-180, 32), (180, 32)] )
    self.insertTestRow(geom.wkt)
    # we should have a line that clips at 0...
    # for western hemisphere....
    tile_mimetype, tile_content = utils.request(self.config_file_content, "vector_test", "geojson", 0, 0, 0)
    self.assertEqual(tile_mimetype, "text/json")
    geojson_result = json.loads(tile_content)
    west_hemisphere_geometry = asShape(geojson_result['features'][0]['geometry'])
    # the west tile should contain only the [-180, 0] half of the line
    expected_geometry = LineString([(-180, 32), (0, 32)])
    self.assertTrue(expected_geometry.almost_equals(west_hemisphere_geometry))
    # for eastern hemisphere....
    tile_mimetype, tile_content = utils.request(self.config_file_content, "vector_test", "geojson", 0, 1, 0)
    self.assertEqual(tile_mimetype, "text/json")
    geojson_result = json.loads(tile_content)
    east_hemisphere_geometry = asShape(geojson_result['features'][0]['geometry'])
    # the east tile should contain only the [0, 180] half of the line
    expected_geometry = LineString([(0, 32), (180, 32)])
    self.assertTrue(expected_geometry.almost_equals(east_hemisphere_geometry))
def chunk(featureFileName, sectionFileName, pattern, key = None):
    # Split the features in *featureFileName* into one shapefile per
    # section polygon in *sectionFileName*.  Output names come from
    # *pattern*, filled with a running index or (when *key* is given)
    # the section's *key* property.
    # Load and index
    with collection(featureFileName, "r") as featureFile:
        featureIdx = index.Index()
        features = []
        for feature in featureFile:
            features.append(feature)
            # rtree entry maps the feature's bbox to its list position
            featureIdx.add(len(features) - 1, asShape(feature['geometry']).bounds)
        # Break up by sections and export
        with collection(sectionFileName, "r") as sectionFile:
            i = 0
            for section in sectionFile:
                fileName = pattern % i
                if key:
                    fileName = pattern % section['properties'][key]
                properties = {}
                try:
                    # output inherits the schema/CRS of the input features
                    with collection(fileName, 'w', 'ESRI Shapefile', schema = featureFile.schema, crs = featureFile.crs) as output:
                        sectionShape = asShape(section['geometry'])
                        # bbox pre-filter, then exact intersection test
                        for j in featureIdx.intersection(sectionShape.bounds):
                            if asShape(features[j]['geometry']).intersects(sectionShape):
                                properties = features[j]['properties']
                                output.write(features[j])
                        print "Exported %s" % fileName
                    i = i + 1
                except ValueError:
                    # typically a schema mismatch; dump context for debugging
                    print "Error exporting " + fileName
                    pprint(properties)
                    pprint(featureFile.schema)
def mix_tma_g(air_tma, air_g):
    # Carve airspace *air_g* out of TMA airspace *air_tma* (mutating
    # air_tma's geometry) and return a new Feature for the overlap band,
    # or None when there is nothing meaningful to do.
    eps = 1e-4  # minimum overlap area considered significant
    geom_tma = asShape(air_tma['geometry'])
    geom_g = asShape(air_g['geometry'])
    if geom_g.intersects(geom_tma):
        geom_tma_minus_g = geom_tma.difference(geom_g)
        geom_intersection = geom_g.intersection(geom_tma)
        # geom_g_minus_tma = geom_g.difference(geom_tma)
        # negligible overlap: leave air_tma untouched
        if geom_intersection.area < eps:
            return
        # TMA starts at or above G's ceiling: vertically disjoint
        if air_tma['properties']['lower'] >= air_g['properties']['upper']:
            return
        air_tma['geometry'] = geom_tma_minus_g
        # overlap inherits the TMA's properties, re-based on top of G
        air_intersection = Feature(
            geometry=geom_intersection,
            properties=deepcopy(air_tma['properties']))
        air_intersection['properties']['name'] = '{} - {}'.format(
            air_tma['properties']['name'], air_g['properties']['name'])
        air_intersection['properties']['lower'] = air_g['properties']['upper']
        del(air_intersection['properties']['lower_raw'])
        air_intersection['properties']['class'] = 'TMA_G_PG'
        air_intersection['properties']['fill'] = '#457aa3'
        return air_intersection
def getNetworkArea(self, bufferDist): from shapely.geometry import asShape segs = self.getEdges() segUnion = asShape(segs[0]).buffer(bufferDist) for n in xrange(len(segs) - 1): print n segUnion = segUnion.union(asShape(segs[n + 1]).buffer(bufferDist)) return segUnion.area
def _add_some_objects(self, some_objects, dst):
    """Project objects onto the cross-section line and collect
    (distance, object) pairs into *dst*."""
    for candidate in some_objects:
        point = asShape(candidate.geometry)
        if not point.within(self.buffer):
            continue
        chainage = self.shape.project(point)
        # The buffer extends beyond the endpoints of the cross-section;
        # points past the ends (but still inside the buffer) project to
        # exactly 0. or the full length with a sharp angle, so they are
        # excluded, as are points landing exactly on either endpoint.
        if 0. < chainage < self.length:
            dst.append((chainage, candidate))
def test_points_geojson(self):
    """
    Create 3 points (2 on west, 1 on east hemisphere) and retrieve as geojson.
    2 points should be returned in western hemisphere and 1 on eastern at zoom level 1
    (clip on)
    """
    self.defineGeometry("POINT")
    point_sf = Point(-122.42, 37.78)
    point_berlin = Point(13.41, 52.52)
    point_lima = Point(-77.03, 12.04)
    self.insertTestRow(point_sf.wkt, "San Francisco")
    self.insertTestRow(point_berlin.wkt, "Berlin")
    self.insertTestRow(point_lima.wkt, "Lima")
    ########
    # northwest quadrant should return San Francisco and Lima
    tile_mimetype, tile_content = utils.request(self.config_file_content, "vectile_test", "json", 0, 0, 1)
    geojson_result = json.loads(tile_content)
    self.assertTrue(tile_mimetype.endswith("/json"))
    self.assertEqual(geojson_result["type"], "FeatureCollection")
    self.assertEqual(len(geojson_result["features"]), 2)
    cities = []
    # Make sure that the right cities have been returned and that the geometries match
    for feature in geojson_result["features"]:
        if feature["properties"]["name"] == "San Francisco":
            cities.append(feature["properties"]["name"])
            self.assertTrue(point_sf.almost_equals(asShape(feature["geometry"])))
        elif feature["properties"]["name"] == "Lima":
            cities.append(feature["properties"]["name"])
            self.assertTrue(point_lima.almost_equals(asShape(feature["geometry"])))
    self.assertTrue("San Francisco" in cities)
    self.assertTrue("Lima" in cities)
    ##########
    # northeast quadrant should return Berlin
    tile_mimetype, tile_content = utils.request(self.config_file_content, "vectile_test", "json", 0, 1, 1)
    geojson_result = json.loads(tile_content)
    self.assertTrue(tile_mimetype.endswith("/json"))
    self.assertEqual(geojson_result["type"], "FeatureCollection")
    self.assertEqual(len(geojson_result["features"]), 1)
    self.assertTrue("Berlin" in geojson_result["features"][0]["properties"]["name"])
def distance(self, b):
    """Return the approximate distance in km between the georeferenced
    context and catalog brain *b*.

    The planar degree distance is scaled by cos(latitude) of the
    context's centroid and converted via the module-level factor F.
    Logs and re-raises on any failure.
    """
    try:
        g = IGeoreferenced(self.context)
        geom = asShape({'type': g.type, 'coordinates': g.coordinates})
        y0 = geom.centroid.y
        other = asShape(b.zgeo_geometry)
        d = geom.distance(other)
        return int(math.cos(math.pi*y0/180.0)*d/F/1000)
    # bug fix: was a bare "except:" which also caught SystemExit and
    # KeyboardInterrupt; narrowed to Exception (still re-raised below)
    except Exception:
        log.warn("Failed to find distance between %s and %s" % (
            self.context, b.getPath()))
        raise
def test_points_geojson(self):
    '''
    Create 3 points (2 on west, 1 on east hemisphere) and retrieve as geojson.
    2 points should be returned in western hemisphere and 1 on eastern at zoom level 0
    (clip on)
    '''
    self.defineGeometry('POINT')
    point_sf = Point(-122.4183, 37.7750)
    point_berlin = Point(13.4127, 52.5233)
    point_lima = Point(-77.0283, 12.0433)
    self.insertTestRow(point_sf.wkt, 'San Francisco')
    self.insertTestRow(point_berlin.wkt, 'Berlin')
    self.insertTestRow(point_lima.wkt, 'Lima')
    ########
    # western hemisphere should return San Francisco and Lima
    tile_mimetype, tile_content = utils.request(self.config_file_content, "vector_test", "geojson", 0, 0, 0)
    geojson_result = json.loads(tile_content)
    self.assertEqual(tile_mimetype, "text/json")
    self.assertEqual(geojson_result['type'], 'FeatureCollection')
    self.assertEqual(len(geojson_result['features']), 2)
    cities = []
    # Make sure that the right cities have been returned and that the geometries match
    for feature in geojson_result['features']:
        if feature['properties']['name'] == 'San Francisco':
            cities.append(feature['properties']['name'])
            self.assertTrue(point_sf.almost_equals(asShape(feature['geometry'])))
        elif feature['properties']['name'] == 'Lima':
            cities.append(feature['properties']['name'])
            self.assertTrue(point_lima.almost_equals(asShape(feature['geometry'])))
    self.assertTrue('San Francisco' in cities)
    self.assertTrue('Lima' in cities)
    ##########
    # eastern hemisphere should return Berlin
    tile_mimetype, tile_content = utils.request(self.config_file_content, "vector_test", "geojson", 0, 1, 0)
    geojson_result = json.loads(tile_content)
    self.assertEqual(tile_mimetype, "text/json")
    self.assertEqual(geojson_result['type'], 'FeatureCollection')
    self.assertEqual(len(geojson_result['features']), 1)
    self.assertTrue('Berlin' in geojson_result['features'][0]['properties']['name'])
def import_aquifers(self):
    # Import aquifer features from a GeoJSON file into the Plone
    # 'aquifers' folder, merging geometry into existing Basin objects
    # and creating new ones otherwise.
    # NOTE(review): the original source had its indentation mangled; the
    # nesting of the contains-checks below is the most plausible
    # reconstruction -- confirm against version control.
    aquifers = geojson.load(open(
        'src/iwlearn.project/iwlearn/project/dataimport/aquifers.json', 'r'))
    parent = self.portal_url.getPortalObject()['iw-projects']['basins']['aquifers']
    for aquifer in aquifers['features']:
        ext = idn.normalize(aquifer['properties']['FIRST_ISAR'])
        new_obj_id = idn.normalize(aquifer['properties']['NAME']) + ext
        if new_obj_id in parent:
            # existing basin: merge the new geometry into the stored one
            mpoly = []
            new_obj=parent[new_obj_id]
            geo = IGeoManager(new_obj)
            add_geom = asShape(aquifer['geometry']).simplify(0.2)
            my_geom = wkt.loads(geo.wkt)
            if my_geom.geom_type == 'MultiPolygon':
                for poly in my_geom.geoms:
                    if poly.contains(add_geom):
                        # already covered by this part: nothing to add
                        continue
                    elif my_geom.contains(add_geom):
                        continue
                    elif add_geom.contains(my_geom):
                        # new geometry supersedes the stored one
                        q = add_geom.__geo_interface__
                    else:
                        # partial overlap: collect parts of both geometries
                        if add_geom.geom_type == 'Polygon':
                            mpoly.append(add_geom)
                        elif add_geom.geom_type == 'MultiPolygon':
                            mpoly += list(add_geom.geoms)
                        if my_geom.geom_type == 'Polygon':
                            mpoly.append(my_geom)
                        elif my_geom.geom_type == 'MultiPolygon':
                            mpoly += list(my_geom.geoms)
                        q = MultiPolygon(mpoly).__geo_interface__
            # NOTE(review): q may be unbound here when my_geom is not a
            # MultiPolygon or every part contained add_geom -- this would
            # raise NameError; verify against the original behavior.
            geo.setCoordinates(q['type'], q['coordinates'])
            print new_obj_id, '*'
        else:
            # new basin: create the content object and style it
            self.portal_types.constructContent('Basin', parent, new_obj_id)
            new_obj=parent[new_obj_id]
            print new_obj_id
            new_obj.setTitle(aquifer['properties']['NAME'])
            new_obj.setDescription("Area: %s; Length: %s" % (
                aquifer['properties']['Shape_Area'],
                aquifer['properties']['Shape_Leng']))
            new_obj.setBasin_type('Aquifer')
            color='c1742c'
            style = IGeoCustomFeatureStyle(new_obj)
            style.geostyles.data['use_custom_styles']=True
            style.geostyles.data['polygoncolor']=color
            style.geostyles.update(style.geostyles)
            geo = IGeoManager(new_obj)
            q = asShape(aquifer['geometry']).simplify(0.2).__geo_interface__
            geo.setCoordinates(q['type'], q['coordinates'])
def reprojectLine(obj, inProjection = 'epsg:3857', outProjection = 'epsg:4326'):
    # NOTE(review): this function appears truncated -- it parses the input
    # into a shapely line but never transforms or returns anything
    # (compare reprojectPoint); confirm against the original source.
    #if our object is a raw string
    if type(obj) == str:
        obj = geojson.loads(obj)
    #if our object is a geojson dict
    if type(obj) == dict:
        #is our geojson a feature class?
        try:
            line = asShape(obj['geometry'])
        #or a regular geometry?
        except KeyError:
            line = asShape(obj)
def read_input_layer(l):
    """Read layer *l* from the module-level INFILE geodatabase and return
    a dict mapping a key to the list of shapely geometries for that key.

    When MERGEFEATURES is True, geometries sharing a TOP10_ID are grouped
    under that id; otherwise each feature gets its own key (its index).
    """
    c = fiona.open(INFILE, 'r', driver='OpenFileGDB', layer=l)
    #-- store the geometries in a list (shapely objects)
    dPolys = {}
    for gid, each in enumerate(c):
        if MERGEFEATURES is True:
            k = each['properties']['TOP10_ID']
        else:
            k = gid
        # idiom fix: setdefault replaces the membership test + two branches
        dPolys.setdefault(k, []).append(asShape(each['geometry']))
    return dPolys
def difference(self):
    """Return geomA minus geomB, computed by PostGIS ST_Difference.

    Expects a JSON body of the form {"geometries": [geomA, geomB]};
    raises HTTPBadRequest otherwise.
    """
    body = loads(self.request.body)
    # idiom fix: isinstance() instead of type(...) != list
    if "geometries" not in body or \
            not isinstance(body["geometries"], list) or \
            len(body["geometries"]) != 2:  # pragma: no cover
        raise HTTPBadRequest("""Wrong body, it should be like that:
{
    "geometries": [geomA, geomB]
}
        """)
    return to_shape(DBSession.query(func.ST_Difference(
        from_shape(asShape(body["geometries"][0])),
        from_shape(asShape(body["geometries"][1]))
    )).scalar())
def check_geometry(r, feature, o):
    # Closure: relies on *layer* and *self* from the enclosing scope.
    # Raises HTTPForbidden unless at least one writable restriction area
    # for the current user/layer contains both geometries.
    # we need both the "original" and "new" geometry to be
    # within the restriction area
    geom_attr, srid = self._get_geom_col_info(layer)
    geom_attr = getattr(o, geom_attr)
    geom = feature.geometry
    # count writable restriction areas containing the stored geometry
    allowed = DBSession.query(func.count(RestrictionArea.id))
    allowed = allowed.join(RestrictionArea.roles)
    allowed = allowed.join(RestrictionArea.layers)
    allowed = allowed.filter(RestrictionArea.readwrite.is_(True))
    allowed = allowed.filter(Role.id == self.request.user.role.id)
    allowed = allowed.filter(Layer.id == layer.id)
    allowed = allowed.filter(or_(
        RestrictionArea.area.is_(None),
        RestrictionArea.area.ST_Contains(geom_attr)
    ))
    spatial_elt = None
    if geom and not isinstance(geom, geojson.geometry.Default):
        shape = asShape(geom)
        spatial_elt = from_shape(shape, srid=srid)
        # the incoming geometry must be contained as well
        allowed = allowed.filter(or_(
            RestrictionArea.area.is_(None),
            RestrictionArea.area.ST_Contains(spatial_elt)
        ))
    if allowed.scalar() == 0:
        raise HTTPForbidden()
    # check is geometry is valid
    self._validate_geometry(spatial_elt)
def geom(item):
    """Return the shapely shape for *item*, deriving a polygon from its
    bbox when no explicit geometry is present."""
    if 'geometry' in item:
        geo_obj = item['geometry']
    else:
        geo_obj = box2poly(item['bbox'])
    return asShape(geo_obj)
def loadBoundingBoxes(mappingFilename, delimiter, geojsonFiles, idPropName, labelPropName=None):
    # Read a CSV mapping of (geonameid, foreign key), then walk the given
    # GeoJSON files and push each matched feature's bounding box into the
    # module-level *geonames* Mongo collection.
    mapping = {}
    mappingFile = csv.reader(open(mappingFilename), delimiter=delimiter)
    # geonames -> FK ; one FK may map to several geonameids
    for row in mappingFile:
        if row[1] not in mapping:
            mapping[row[1]] = []
        mapping[row[1]].append(row[0])
    for f in geojsonFiles:
        print 'Processing %s' % f
        data = geojson.loads(open(f).read())
        # accept either a FeatureCollection or a bare feature list
        if 'features' in data:
            data = data['features']
        for f in data:
            id = str(f['properties'][idPropName])
            print "looking for %s" % id
            if id in mapping:
                for geonameid in mapping[id]:
                    bounds = asShape(f['geometry']).bounds
                    label = f['properties'][labelPropName] if labelPropName else ''
                    # shapely bounds order is (minx, miny, maxx, maxy)
                    (minlng, minlat, maxlng, maxlat) = bounds
                    print (u'%s -> %s (%s) [%s,%s][%s,%s]' % (id, geonameid, label, minlat, minlng, maxlat, maxlng)).encode('utf-8')
                    # store as lat/lng corner pairs (ne / sw)
                    geonames.update({"geonameid": geonameid},
                        {"$set": {"bb": {
                            'ne': [maxlat, maxlng],
                            'sw': [minlat, minlng]
                        }}})
def cascaded_union(shapes):
    """Return the union of an iterable of geo objects as a shape.

    Every element must expose ``__geo_interface__``; raises TypeError
    for the first element that does not.
    """
    converted = []
    for candidate in shapes:
        if not hasattr(candidate, '__geo_interface__'):
            raise TypeError("%r does not appear to be a shape" % candidate)
        converted.append(geom.asShape(candidate))
    merged = shops.cascaded_union(converted)
    return asShape(merged)
def import_oceans(self):
    # Import ocean features from a GeoJSON file into the Plone 'oceans'
    # folder, keeping only large polygon parts and simplifying them.
    oceans = geojson.load(open(
        'src/iwlearn.project/iwlearn/project/dataimport/oceans.json', 'r'))
    parent = self.portal_url.getPortalObject()['iw-projects']['basins']['oceans']
    for ocean in oceans['features']:
        mpoly = []
        geom = asShape(ocean['geometry'])
        # drop small islands/slivers: keep parts with area > 5 (sq. degrees)
        for g in geom.geoms:
            if g.area > 5:
                mpoly.append(g)
        mp = MultiPolygon(mpoly).simplify(0.2)
        q = mp.__geo_interface__
        new_obj_id = idn.normalize(ocean['properties']['NAME'])
        print new_obj_id
        self.portal_types.constructContent('Basin', parent, new_obj_id)
        new_obj=parent[new_obj_id]
        new_obj.setTitle(ocean['properties']['NAME'])
        new_obj.setBasin_type('Ocean')
        # fixed display colour for oceans
        color='aa22ff'
        style = IGeoCustomFeatureStyle(new_obj)
        style.geostyles.data['use_custom_styles']=True
        style.geostyles.data['polygoncolor']=color
        style.geostyles.update(style.geostyles)
        geo = IGeoManager(new_obj)
        geo.setCoordinates(q['type'], q['coordinates'])
def read_one(self):
    # Read a single feature from a layer, enforcing restriction-area
    # permissions for non-public layers.  Raises HTTPForbidden when the
    # user is anonymous or no matching restriction area contains the
    # feature's geometry.
    set_common_headers(self.request, "layers", NO_CACHE, add_cors=True)
    layer = self._get_layer_for_request()
    protocol = self._get_protocol_for_layer(layer)
    feature_id = self.request.matchdict.get("feature_id", None)
    feature = protocol.read(self.request, id=feature_id)
    # protocol.read may return an error response instead of a Feature
    if not isinstance(feature, Feature):
        return feature
    if layer.public:
        return feature
    if self.request.user is None:
        raise HTTPForbidden()
    geom = feature.geometry
    # geometry-less features cannot be checked against restriction areas
    if not geom or isinstance(geom, geojson.geometry.Default):  # pragma: no cover
        return feature
    shape = asShape(geom)
    srid = self._get_geom_col_info(layer)[1]
    spatial_elt = from_shape(shape, srid=srid)
    # count restriction areas (for this role/layer) containing the feature
    allowed = DBSession.query(func.count(RestrictionArea.id))
    allowed = allowed.join(RestrictionArea.roles)
    allowed = allowed.join(RestrictionArea.layers)
    allowed = allowed.filter(Role.id == self.request.user.role.id)
    allowed = allowed.filter(Layer.id == layer.id)
    allowed = allowed.filter(or_(
        RestrictionArea.area.is_(None),
        RestrictionArea.area.ST_Contains(spatial_elt)
    ))
    if allowed.scalar() == 0:
        raise HTTPForbidden()
    return feature
def value_to_shape(value):
    """Transforms input into a Shapely object"""
    # Accepts WKT strings, GeoJSON strings, shapely geometries, or any
    # object exposing a .wkt attribute.  Falsy input yields an empty
    # GEOMETRYCOLLECTION; an empty string yields False (ORM convention).
    if not value:
        return wkt.loads('GEOMETRYCOLLECTION EMPTY')
    if isinstance(value, basestring):
        # We try to do this before parsing json exception
        # exception are ressource costly
        # heuristic: a '{' marks GeoJSON, otherwise assume WKT
        if '{' in value:
            geo_dict = geojson.loads(value)
            shape_to_return = asShape(geo_dict)
        elif value:
            # if value is empty sting we return False to be orm coherent,
            #may be we should return an empty shapely
            shape_to_return = wkt.loads(value)
        else:
            return False
    elif hasattr(value, 'wkt'):
        #Nasty but did not find equivalent of base string for shapely
        if 'shapely.geometry' in str(type(value)):
            shape_to_return = value
        else:
            # non-shapely objects with a wkt attr are round-tripped via WKT
            shape_to_return = wkt.loads(value.wkt)
    else:
        raise TypeError('Write/create/search geo type must be wkt/geojson '
                        'string or must respond to wkt')
    return shape_to_return
def getGeometryWKT(self):
    """Return WKT representation of geometry"""
    # raw geometry is stored as "<Type>: <coordinates>"; rebuild it as a
    # GeoJSON fragment, parse, and dump as WKT
    raw_parts = self._getGeometryRaw().split(':')
    geojson_text = '{"type": "%s", "coordinates": %s}' % (
        raw_parts[0].strip(), raw_parts[1].strip())
    geo_dict = simplejson.loads(geojson_text)
    return wkt.dumps(asShape(geo_dict))
def poly_point_simplify(features, min_area):
    """Yield the given features, replacing the geometry of any feature
    smaller than *min_area* with its centroid point."""
    for feat in features:
        shp = asShape(feat['geometry'])
        if shp.area < min_area:
            feat['geometry'] = mapping(shp.centroid)
        yield feat
def shape_poly(self):
    """Load the boundary shapefile's first record as a shapely geometry,
    cache it on self.polygon, and return it."""
    with fiona.open(self.boundary) as fiona_collection:
        # In this case, we'll assume the shapefile only has one layer
        # portability fix: next(iterator) instead of the Python-2-only
        # .next() method
        shapefile_record = next(fiona_collection)
        # Use Shapely to create the polygon
        self.polygon = geometry.asShape( shapefile_record['geometry'] )
        return self.polygon
def import_lakes2(self):
    # Import missing lake features from a GeoJSON file into the Plone
    # 'lakes' folder; only features typed 'Lake' are created.
    lakes = geojson.load(open(
        'src/iwlearn.project/iwlearn/project/dataimport/missing_lakes.json', 'r'))
    parent = self.portal_url.getPortalObject()['iw-projects']['basins']['lakes']
    for lake in lakes['features']:
        if lake['properties']['TYPE']=='Lake':
            # content id is derived from the GLWD identifier
            new_obj_id = idn.normalize(lake['properties']['GLWD_ID'])
            print new_obj_id
            self.portal_types.constructContent('Basin', parent, new_obj_id)
            new_obj=parent[new_obj_id]
            if lake['properties']['LAKE_NAME']:
                new_obj.setTitle(lake['properties']['LAKE_NAME'])
            new_obj.setDescription("Area: %s; Perimeter: %s; Countries: %s" % (
                lake['properties']['AREA_SKM'],
                lake['properties']['PERIM_KM'],
                lake['properties']['COUNTRY'],
            ))
            new_obj.setBasin_type('Lake')
            # fixed display colour for lakes
            color='2c80d3'
            style = IGeoCustomFeatureStyle(new_obj)
            style.geostyles.data['use_custom_styles']=True
            style.geostyles.data['polygoncolor']=color
            style.geostyles.update(style.geostyles)
            geo = IGeoManager(new_obj)
            # simplify the geometry before storing its coordinates
            q = asShape(lake['geometry']).simplify(0.1).__geo_interface__
            geo.setCoordinates(q['type'], q['coordinates'])
def parse_flickr_geojson(flickr_file):
    # Parse a Flickr shapes GeoJSON file and print one tab-separated
    # record per feature to stdout; returns the input filename.
    json_data = codecs.open(flickr_file, "r", "utf-8").read()
    data = demjson.decode(json_data)
    features = data['features']
    for feature in features:
        woe_id = str(feature['properties']['woe_id'])
        name = feature['properties']['label']
        # unnamed features are skipped entirely
        if not name:
            continue
        feature_type = feature['properties']['place_type']
        feature_code = str(feature['properties']['place_type_id'])
        json_geometry = feature['geometry']
        updated = "2011-01-08 00:00:00+00" #i.e. as from http://code.flickr.com/blog/2011/01/08/flickr-shapefiles-public-dataset-2-0/
        # geometry is emitted as WKT text
        geometry = asShape(json_geometry).wkt
        out_line = ['F', woe_id, name, feature_type, feature_code, updated, geometry ]
        print "\t".join(out_line)
    return flickr_file
def genFeature(self, geom, allGeoms, errorCounter):
    """Convert *geom* to a shapely shape and append it to *allGeoms*;
    on failure, log the problem and bump *errorCounter*.

    Returns the (allGeoms, errorCounter) pair.
    """
    try:
        curShape = asShape(geom)
        allGeoms.append(curShape)
    except Exception as e:
        # bug fix: the handler called explain_validity(curShape), but
        # curShape is unbound whenever asShape() itself raised, turning
        # every failure into a NameError; log the offending input and
        # the exception instead.
        logging.error("could not convert geometry %r: %s", geom, e)
        errorCounter += 1
    return allGeoms, errorCounter
def shapeup(self, ob):
    """Coerce *ob* into a shapely geometry.

    Already-shapely objects pass through; everything else is adapted via
    asShape, falling back to asLineString for raw coordinate sequences.
    """
    if isinstance(ob, BaseGeometry):
        return ob
    try:
        return asShape(ob)
    except ValueError:
        return asLineString(ob)
def test_empty_geointerface_adapter():
    """An empty GeometryCollection mapping adapts to an empty collection."""
    collection_dict = {"type": "GeometryCollection", "geometries": []}
    adapted = asShape(collection_dict)
    assert adapted.geom_type == "GeometryCollection"
    assert adapted.is_empty
    assert len(adapted) == 0
    assert adapted.geoms == []
def test_geointerface_adapter(geometrycollection_geojson):
    """A two-member GeometryCollection adapts with both member types intact."""
    adapted = asShape(geometrycollection_geojson)
    assert adapted.geom_type == "GeometryCollection"
    assert len(adapted) == 2
    member_types = [member.geom_type for member in adapted.geoms]
    assert "Point" in member_types
    assert "LineString" in member_types
def index_hexagons(hexagons, grid):
    """For each hexagon feature, find the nearest grid-cell centroid with a
    KD-tree and store its index in feature.properties["face_cell"].

    Returns the (mutated) *hexagons* collection.
    """
    # idiom fix: build the centroid coordinate array with a comprehension
    # instead of an append loop; each shape is adapted once
    grid_coor = np.array([
        [cell.x, cell.y]
        for cell in (geometry.asShape(feature.geometry).centroid
                     for feature in grid.features)
    ])
    grid_tree = cKDTree(grid_coor)
    for feature in hexagons.features:
        # hoist the centroid so the shape is adapted only once per feature
        centroid = geometry.asShape(feature.geometry).centroid
        dist, index = grid_tree.query(np.array([centroid.x, centroid.y]))
        feature.properties["face_cell"] = index
    return hexagons
def test_graphical_isochrones_speed_factor(self):
    # Isochrones computed at walking_speed=2 must geographically contain
    # the default-speed isochrones for the same origin/duration.
    q_speed_2 = (
        "v1/coverage/main_routing_test/isochrones?from={}&max_duration={}&datetime={}&walking_speed={}"
    )
    q_speed_2 = q_speed_2.format(s_coord, '3600', '20120614T0800', 2)
    response_speed_2 = self.query(q_speed_2)
    q_speed_1 = "v1/coverage/main_routing_test/isochrones?from={}&datetime={}&max_duration={}"
    q_speed_1 = q_speed_1.format(s_coord, '20120614T080000', '3600')
    isochrone = self.query(q_speed_1)
    is_valid_graphical_isochrone(response_speed_2, self.tester, q_speed_2)
    for isochrone_speed_2, isochrone in zip(response_speed_2['isochrones'], isochrone['isochrones']):
        # faster walking reaches everything the slower walk reaches
        multi_poly_speed_2 = asShape(isochrone_speed_2['geojson'])
        multi_poly = asShape(isochrone['geojson'])
        assert multi_poly_speed_2.contains(multi_poly)
def test_graphical_isochrones_no_seconds_in_datetime(self):
    # A datetime without seconds (HHMM) must yield exactly the same
    # isochrones as the fully specified datetime (HHMMSS).
    q_no_s = "v1/coverage/main_routing_test/isochrones?from={}&max_duration={}&datetime={}"
    q_no_s = q_no_s.format(s_coord, '3600', '20120614T0800')
    response_no_s = self.query(q_no_s)
    q_s = "v1/coverage/main_routing_test/isochrones?from={}&datetime={}&max_duration={}"
    q_s = q_s.format(s_coord, '20120614T080000', '3600')
    isochrone = self.query(q_s)
    is_valid_graphical_isochrone(response_no_s, self.tester, q_no_s)
    assert len(response_no_s['isochrones']) == len(isochrone['isochrones'])
    for isochrone_no_s, isochrone in zip(response_no_s['isochrones'], isochrone['isochrones']):
        # geometric equality, not containment: results must be identical
        multi_poly_no_s = asShape(isochrone_no_s['geojson'])
        multi_poly = asShape(isochrone['geojson'])
        assert multi_poly_no_s.equals(multi_poly)
def geojson_validator(value):
    """Form validator: *value* must be empty or a parseable GeoJSON
    string.  Returns the value unchanged; raises Invalid otherwise."""
    if value:
        try:
            gjson = json.loads(value)
            # cleanup: the shape is built purely for validation; the
            # previously bound-but-unused variable is dropped
            asShape(gjson)
        except ValueError:
            raise Invalid(_("Invalid GeoJSON"))
    return value
def makeSinglePolygonFromSingleLine(lineGeom):
    '''
    This is incredibly simple at barebones.
    Input needs to be a shapely line geometry
    '''
    line_shape = geometry.asShape(lineGeom)
    return geometry.asPolygon(line_shape)
def test_empty_geointerface_adapter(self):
    """Adapting an empty GeometryCollection mapping yields an empty
    collection."""
    empty_collection = {"type": "GeometryCollection", "geometries": []}
    # asShape
    adapted = asShape(empty_collection)
    self.assertEqual(adapted.geom_type, "GeometryCollection")
    self.assertEqual(len(adapted), 0)
    self.assertEqual(adapted.geoms, [])
def geometry(self):
    """return the multi-line geometry from one or more matches"""
    # adapt each matching's geometry, then bundle them into one multi-line
    lines = []
    for matching in self.response['matchings']:
        lines.append(asShape(matching['geometry']))
    return MultiLineString(lines)
def test_write_polygon(self):
    # Round-trip: read the UK test shapefile, buffer each geometry by
    # 1.0 and write the result to a new shapefile with the same schema.
    with collection("docs/data/test_uk.shp", "r") as input:
        schema = input.schema.copy()
        with collection("test_write_polygon.shp", "w", "ESRI Shapefile", schema) as output:
            for f in input:
                f['geometry'] = mapping(asShape(f['geometry']).buffer(1.0))
                output.write(f)
def test_graphical_isochrons_no_datetime(self):
    # Omitting datetime while pinning _current_datetime must yield the
    # same isochrons as passing that datetime explicitly.
    q_no_dt = "v1/coverage/main_routing_test/isochrons?from={}&max_duration={}&_current_datetime={}"
    q_no_dt = q_no_dt.format(s_coord, '3600', '20120614T080000')
    response_no_dt = self.query(q_no_dt)
    q_dt = "v1/coverage/main_routing_test/isochrons?from={}&datetime={}&max_duration={}"
    q_dt = q_dt.format(s_coord, '20120614T080000', '3600')
    isochron = self.query(q_dt)
    is_valid_graphical_isochron(response_no_dt, self.tester, q_no_dt)
    assert len(response_no_dt['isochrons']) == len(isochron['isochrons'])
    for isochron_no_dt, isochron in zip(response_no_dt['isochrons'], isochron['isochrons']):
        # geometric equality, not containment: results must be identical
        multi_poly_no_datetime = asShape(isochron_no_dt['geojson'])
        multi_poly = asShape(isochron['geojson'])
        assert multi_poly_no_datetime.equals(multi_poly)
def post_station(info_role):
    """
    Post one occhab station (station + habitats)

    .. :quickref: OccHab; Post one occhab station (station + habitats)

    :returns: GeoJson<TStationsOcchab>
    """
    data = dict(request.get_json())
    occ_hab = None
    properties = data['properties']
    # habitats and observers are relationship data, handled separately
    # from the plain station columns
    if "t_habitats" in properties:
        occ_hab = properties.pop("t_habitats")
    observers_list = None
    if "observers" in properties:
        observers_list = properties.pop("observers")
    station = TStationsOcchab(**properties)
    shape = asShape(data["geometry"])
    # stored geometry is 2D: strip any Z coordinate before saving
    two_dimension_geom = remove_third_dimension(shape)
    station.geom_4326 = from_shape(two_dimension_geom, srid=4326)
    if observers_list is not None:
        # resolve observer dicts to User rows by id_role
        observers = (
            DB.session.query(User).filter(
                User.id_role.in_(
                    list(map(lambda user: user['id_role'], observers_list))
                )).all()
        )
        for o in observers:
            station.observers.append(o)
    t_hab_list_object = []
    if occ_hab is not None:
        for occ in occ_hab:
            if occ['id_habitat'] is None:
                occ.pop('id_habitat')
            # drop any keys that are not THabitatsOcchab attributes
            data_attr = [k for k in occ]
            for att in data_attr:
                if not getattr(THabitatsOcchab, att, False):
                    occ.pop(att)
            t_hab_list_object.append(THabitatsOcchab(**occ))
    # set habitat complexe
    station.is_habitat_complex = len(t_hab_list_object) > 1
    station.t_habitats = t_hab_list_object
    # an existing id means update: check the user's CRUVED first
    if station.id_station:
        user_cruved = get_or_fetch_user_cruved(
            session=session,
            id_role=info_role.id_role,
            module_code="OCCHAB"
        )
        # check if allowed to update or raise 403
        station.check_if_allowed(info_role, 'U', user_cruved["U"])
        DB.session.merge(station)
    else:
        DB.session.add(station)
    DB.session.commit()
    return station.get_geofeature()
def get_results_for_boundary(boundary, charge_filter, max_results):
    """Returns the results of land charges contained in a extent

    :param boundary: Extent to search for land charges within
    :param charge_filter: String indicating whether to filter out cancelled charges
    :param max_results: Max number of land charges to be returned.  Defaults to 1000
    :return: Json representation of the results
    :raises ApplicationError: 507 when more than max_results charges match,
        404 when none match, 422 when the boundary is unprocessable
    """
    try:
        extent_shape = asShape(boundary)
        # geometries are stored in British National Grid (EPSG:27700)
        geo_extent_shape = shape.from_shape(unary_union(extent_shape), srid=27700)
        # candidate charge ids: any geometry within distance 0 of the extent
        subquery = db.session.query(GeometryFeature.local_land_charge_id, GeometryFeature.geometry) \
            .distinct(GeometryFeature.local_land_charge_id) \
            .filter(func.ST_DWithin(GeometryFeature.geometry, geo_extent_shape, 0)) \
            .subquery()
        # conditional planning consents that merely touch the boundary are
        # excluded; charge_filter additionally hides cancelled charges
        if charge_filter:
            charge_query = LocalLandCharge.query \
                .filter(LocalLandCharge.id == subquery.c.local_land_charge_id) \
                .filter(or_(~func.ST_Touches(subquery.c.geometry, geo_extent_shape),
                            ~LocalLandCharge.llc_item.contains(
                                {'charge-sub-category': 'Conditional planning consent'}))) \
                .filter(LocalLandCharge.cancelled.isnot(True)) \
                .order_by(LocalLandCharge.llc_item[SORT_BY_FIELD].desc())
        else:
            charge_query = LocalLandCharge.query \
                .filter(LocalLandCharge.id == subquery.c.local_land_charge_id) \
                .filter(or_(~func.ST_Touches(subquery.c.geometry, geo_extent_shape),
                            ~LocalLandCharge.llc_item.contains(
                                {'charge-sub-category': 'Conditional planning consent'}))) \
                .order_by(LocalLandCharge.llc_item[SORT_BY_FIELD].desc())
        num_results = charge_query.count()
        # refuse oversized result sets before fetching them
        if num_results > max_results:
            current_app.logger.info("Search-area: {0}, "
                                    "Number-of-charges: {1}, "
                                    "Normal-limit: {2}, "
                                    "Too many charges returned".format(
                                        boundary, num_results, max_results))
            raise ApplicationError("Too many charges, search a smaller area", 507, 507)
        llc_result = charge_query.all()
        if llc_result and len(llc_result) > 0:
            current_app.logger.info("Returning local land charges")
            return json.dumps(
                model_mappers.map_llc_result_to_dictionary_list(
                    llc_result)), 200, {
                'Content-Type': 'application/json'
            }
        else:
            raise ApplicationError("No land charges found", 404, 404)
    except (ValueError, TypeError) as err:
        raise ApplicationError("Unprocessable Entity. {}".format(err), 422, 422)
def set_geo_fence(request):
    """Create and persist a GeoFence from a GeoJSON FeatureCollection in
    the request body, then queue it for writing.

    Returns 415 for a non-JSON content type, 400 when no body is present,
    otherwise 200 with the new fence's id.
    """
    try:
        assert request.headers['Content-Type'] == 'application/json'
    except AssertionError:
        msg = {"message":"Unsupported Media Type"}
        return HttpResponse(json.dumps(msg), status=415, mimetype='application/json')
    try:
        geo_json_fc = request.data
    except KeyError:
        msg = json.dumps({"message":"A geofence object is necessary in the body of the request"})
        return HttpResponse(msg, status=400)
    shp_features = []
    for feature in geo_json_fc['features']:
        shp_features.append(asShape(feature['geometry']))
    combined_features = unary_union(shp_features)
    bnd_tuple = combined_features.bounds
    # bug fix: the coordinates were concatenated with no separator, making
    # the stored bounds string unparseable; join with commas
    bounds = ','.join(['{:.7f}'.format(x) for x in bnd_tuple])
    # bug fix: properties were read via geo_json_fc[0][...], which always
    # raised KeyError (the collection is a dict) and silently fell back to
    # the defaults; read them from the first feature instead
    try:
        props = geo_json_fc['features'][0]['properties']
    except (KeyError, IndexError):
        props = {}
    try:
        s_time = props["start_time"]
    except KeyError:
        start_time = arrow.now().isoformat()
    else:
        start_time = arrow.get(s_time).isoformat()
    try:
        e_time = props["end_time"]
    except KeyError:
        # default validity window: one hour from now
        end_time = arrow.now().shift(hours=1).isoformat()
    else:
        end_time = arrow.get(e_time).isoformat()
    try:
        upper_limit = Decimal(props["upper_limit"])
    except KeyError:
        upper_limit = 500.00
    # bug fix: lower_limit previously read the "upper_limit" key
    try:
        lower_limit = Decimal(props["lower_limit"])
    except KeyError:
        lower_limit = 100.00
    try:
        name = props["name"]
    except KeyError:
        name = "Standard Geofence"
    raw_geo_fence = json.dumps(geo_json_fc)
    geo_f = GeoFence(raw_geo_fence = raw_geo_fence, start_datetime = start_time,
                     end_datetime = end_time, upper_limit = upper_limit,
                     lower_limit = lower_limit, bounds = bounds, name = name)
    geo_f.save()
    # asynchronous write of the fence payload
    write_geo_fence.delay(geo_fence = raw_geo_fence)
    op = json.dumps ({"message":"Geofence Declaration submitted", 'id':str(geo_f.id)})
    return HttpResponse(op, status=200)
def testing():
    """Exploratory watershed-delineation script over HydroSHEDS rasters.

    I understand we need both elevation and drainage direction to delineate
    the watershed. We can compute the drainage direction from the DEM using
    flowdir. Downloaded the au and ca files from hydrosheds at 30sec
    resolution. Issues with affine transformation. Flow directions seem to be
    saved in int16 to limit file size. I assume this is why dirmap takes
    powers of 2 to indicate directions. Catchment delineation does not seem
    to work.
    """
    from matplotlib import pyplot as plt

    # Paths to the 30-arcsecond DEM and flow-direction test rasters.
    dem_fn = "../../tests/testdata/ca_dem_30s/ca_dem_30s/"
    dir_fn = "../../tests/testdata/ca_dir_30s/ca_dir_30s/"
    fv = -32768  # nodata fill value (int16 minimum, per the docstring note)
    grid = Grid.from_raster(dir_fn, "dir", nodata=fv)
    grid.read_raster(dem_fn, "dem", nodata=fv)
    # Pour point (outlet) in lon/lat.
    lon, lat = -99.0619, 20.933
    fig, (ax1, ax2) = plt.subplots(1, 2)
    # Left panel: elevation; right panel: flow direction.
    idem = ax1.imshow(grid.view("dem"), extent=grid.extent, cmap="cubehelix", zorder=1, vmin=0)
    plt.colorbar(idem, ax=ax1, label="Elevation (m)")
    idir = ax2.imshow(grid.view("dir"), extent=grid.extent, cmap="viridis", zorder=2, vmin=0)
    # NOTE(review): `dirmap` and `nodata` are not defined in this function —
    # presumably module-level globals; confirm they exist before running.
    boundaries = [0] + sorted(list(dirmap))
    plt.colorbar(idir, ax=ax2, boundaries=boundaries, values=sorted(dirmap))
    grid.catchment(
        data="dir",
        x=lon,
        y=lat,
        dirmap=dirmap,
        out_name="catch",
        xytype="label",
        nodata_in=nodata,
    )
    catch = grid.polygonize(grid.catch.astype("int32"), connectivity=8)
    grid.clip_to("catch")
    # Overlay each catchment polygon outline on the elevation panel.
    for (p, v) in catch:
        poly = geometry.asShape(p)
        ax1.plot(*poly.exterior.xy, color="white")
    plt.show()
def extract_shapefile(shapefile, uri_name, simplify_tolerance=None): for feature in collection(shapefile, "r"): geometry = feature["geometry"] properties = feature["properties"] #calculate centroid geom_obj = asShape(geometry) try: centroid = [geom_obj.centroid.x , geom_obj.centroid.y] except AttributeError: print "Error: ", feature continue if properties["NAME"]: name = properties["NAME"] else: continue #feature code mapping feature_code = "ADM3" if properties["LSAD"] == "Resvn": feature_code = "RESV" area = properties["CENSUSAREA"] source = properties #keep all fields anyhow # unique URI which internally gets converted to the place id # Must be unique! uri = uri_name + "." + properties["GEO_ID"] + "."+ feature["id"] timeframe = {} timeframe = {"start": "2000-01-01","start_range":0, "end": "2010-01-01", "end_range":0} updated = "2012-01-31" place = { "name":name, "centroid":centroid, "feature_code": feature_code, "geometry":geometry, "is_primary": True, "source": source, "alternate": [], "updated": updated, "area": area, "uris":[uri], "relationships": [], "timeframe":timeframe, "admin":[] } #print place dump.write(uri, place)
def get_sympo_zone2(self): """ Retourne les zones du departement concernes """ # read the zone sympos file for the corresponding department fname_mask = '../GeoData/zones_sympo_multiples/'+self.dept+'_mask_zones_sympos.nc' da_mask = xr.open_dataarray(fname_mask) # On lit le fichier de zone homogene self.zone_homo_id = self.reading_homogeneous_results() # As we are able to create zone which are not part of the "zones_sympo_combined_dpt.json", we need to create them (to plot them. # when the combined zone is not part of the "zones_sympo_combined_dpt.json" we need to create it with open("../GeoData/ZonesSympo/zones_sympo_4326.json","r") as fp: poly_geo = json.load(fp) # list of the zones sympo id in the json file (zones_sympo_4326.json) feature=[] # Will contain the list of new area zs_json=[poly_geo["features"][i]["properties"]["id"] for i in range(len(poly_geo["features"]))] for val in self.zone_homo_id: if not (val in list(da_mask.id.values)): #print('qd est ce qu on est dans ce cas ci?',val,self.step) val_l=val.split('+') for j,zs in enumerate(val_l): if j==0: # init shape id_json=zs_json.index(zs) shape = sh.asShape(poly_geo['features'][id_json]['geometry']) else: id_json=zs_json.index(zs) shape=shape.union(sh.asShape(poly_geo['features'][id_json]['geometry'])) feature.append(geojson.Feature(geometry=shape,properties = {'id':val})) data = geojson.FeatureCollection(feature) zsympo = "../GeoData/ZonesSympo/zones_sympo_combined_"+self.dept+".json" with open(zsympo) as geojson1: poly_geojson = json.load(geojson1) if not len(feature)==0: for new_feat in data["features"]: poly_geojson["features"].append(new_feat) self.region_geo2 = self.extract_geozone(poly_geojson,self.zone_homo_id) # for the sake of not bugging the function ipyl.Choropleth self.homo_zone = dict(zip(self.zone_homo_id,np.asarray(self.temps_l).astype(float)))
def test_graphical_isochrones_forbidden_uris(self):
    """Forbidding a uri must shrink the isochrone, never grow it."""
    template = "v1/coverage/main_routing_test/isochrones?from={}&datetime={}&max_duration={}"
    base_query = template.format(s_coord, "20120614T080000", "300")
    base_response = self.query(base_query)
    restricted_query = base_query + "&forbidden_uris[]=A"
    restricted_response = self.query(restricted_query)
    is_valid_graphical_isochrone(base_response, self.tester, base_query)
    is_valid_graphical_isochrone(restricted_response, self.tester, base_query)
    pairs = zip(base_response['isochrones'], restricted_response['isochrones'])
    for base_iso, restricted_iso in pairs:
        base_shape = asShape(base_iso['geojson'])
        restricted_shape = asShape(restricted_iso['geojson'])
        # The restricted area cannot cover the unrestricted one and
        # must be strictly smaller.
        assert not restricted_shape.contains(base_shape)
        assert base_shape.area > restricted_shape.area
def test_write_point(self):
    """Write centroids of bbox-filtered UK features as a point shapefile."""
    with collection("docs/data/test_uk.shp", "r") as source:
        point_schema = source.schema.copy()
        point_schema['geometry'] = 'Point'
        with collection("test_write_point.shp", "w", "ESRI Shapefile", point_schema) as sink:
            for record in source.filter(bbox=(-5.0, 55.0, 0.0, 60.0)):
                # Replace each feature geometry with its centroid point.
                centroid_geom = asShape(record['geometry']).centroid
                record['geometry'] = mapping(centroid_geom)
                sink.write(record)
def get_geoms_for_bounds(self, bounds):
    """
    Helper method to get geometries within a certain bounds (as WKT).
    Returns GeoJSON (loaded as a list of python dictionaries).
    """
    shapes = []
    # items() yields (id, record) pairs; only the record's geometry matters.
    for entry in self._source.items(bbox=bounds):
        shapes.append(asShape(entry[1]['geometry']))
    return shapes
def geojsonWkt():
    """Round-trip a point from geojson to shapely via __geo_interface__."""
    import geojson
    # Build a GeoJSON point and show its serialised form.
    pt = geojson.Point([-92, 37])
    serialised = geojson.dumps(pt)
    print(serialised)
    # Use __geo_interface__ between geojson and shapely
    from shapely.geometry import asShape
    shapely_pt = asShape(pt)
    print(shapely_pt.wkt)
def preprocess_cycling(data):
    """Yield one record per line segment of the cycling network features.

    LineString features are yielded as-is; MultiLineStrings are split into
    one record per component line, all sharing the same properties.
    """
    for segment in data['features']:
        geom = sg.asShape(segment['geometry'])
        attrs = segment['properties']
        props = {
            'way_id': attrs['ID'],
            'link_id': attrs['ID_TRC_GEOBASE'],
            'type': attrs['TYPE_VOIE'] + 10,
            'sens': 0,
        }
        if geom.type == 'LineString':
            yield {
                'geometry': sg.asShape(segment['geometry']),
                'properties': props,
            }
        elif geom.type == 'MultiLineString':
            for part in geom:
                yield {'geometry': part, 'properties': props}
def test_graphical_isochrones_allowed_id(self):
    """Restricting allowed ids must shrink the isochrone, never grow it."""
    template = "v1/coverage/main_routing_test/isochrones?from={}&datetime={}&max_duration={}"
    reference_query = template.format(s_coord, "20120614T080000", "300")
    reference_response = self.query(reference_query)
    restricted_query = reference_query + "&allowed_id[]=B"
    restricted_response = self.query(restricted_query)
    is_valid_graphical_isochrone(reference_response, self.tester, reference_query)
    is_valid_graphical_isochrone(restricted_response, self.tester, reference_query)
    paired = zip(reference_response['isochrones'], restricted_response['isochrones'])
    for reference_iso, restricted_iso in paired:
        reference_poly = asShape(reference_iso['geojson'])
        restricted_poly = asShape(restricted_iso['geojson'])
        # The restricted area cannot cover the reference one and
        # must be strictly smaller.
        assert not restricted_poly.contains(reference_poly)
        assert reference_poly.area > restricted_poly.area
def test_geometrycollection_adapter_deprecated():
    """asShape must warn on GeometryCollections, empty or not."""
    populated = {
        "type": "GeometryCollection",
        "geometries": [
            {"type": "Point", "coordinates": (0, 3)},
            {"type": "LineString", "coordinates": ((2, 0), (1, 0))},
        ],
    }
    with pytest.warns(ShapelyDeprecationWarning):
        asShape(populated)
    empty = {"type": "GeometryCollection", "geometries": []}
    with pytest.warns(ShapelyDeprecationWarning):
        asShape(empty)
def centrality_corr_neighbors_multiple(zipcode, path):
    """
    Find the relationship between node of given zipcode at t0 and
    node's neighbors at t1.

    :param zipcode: zip code whose neighbourhood is examined
    :param path: sequence of six format arguments for the centrality file path
    :return: dict mapping neighbouring zip code (str) -> Pearson correlation
    """
    # Load relevant centrality measure for graph of interest.
    # BUG FIX: the file handle was previously opened inline and never closed.
    fname = 'data/{}/{}/distance/{}/zip/centrality/{}_{}_{}.json'.format(*path)
    with open(fname) as fp:
        info = json.load(fp)
    zipcode_cent = info[zipcode][:-1]
    zipcode_shape = asShape(_geometry.find_one({'zip': zipcode})['geometry'])
    # Delete keys whose geometry does not intersect the zipcode of interest.
    # BUG FIX: iterate over a snapshot of the keys — deleting entries while
    # iterating the live key view raises RuntimeError in Python 3.
    for z in list(info.keys()):
        if not asShape(_geometry.find_one(
                {'zip': z})['geometry']).intersects(zipcode_shape):
            del info[z]
    y = dict()
    for z in sorted(info.keys()):
        # NOTE(review): the reference series drops its LAST element ([:-1])
        # while each neighbour drops its FIRST ([1:]) — presumably a
        # deliberate t0-vs-t1 lag; confirm both series have equal length.
        y[str(z)] = pearsonr(zipcode_cent, info[z][1:])[0]
    return y
def PointsAlongLine(params: Parameters, **kwargs):
    """
    Generates regularly spaced points along input linestrings

    Reads linestrings from ``params.lines``, places a point every
    ``params.distance`` units along each line, and writes the points
    (with AXIS/CDENTITEHY/TOPONYME attributes and the along-line
    distance) to ``params.output``.
    """
    shapefile = params.lines.filename(**kwargs)
    output = params.output.filename(**kwargs)
    with fiona.open(shapefile) as fs:
        # Propagate driver/CRS from the source to the output dataset.
        options = dict(driver=fs.driver, crs=fs.crs)

        def open_output_shapefile(filename):
            """ output coroutine """
            schema = {
                'geometry': 'Point',
                'properties': [('AXIS', 'int'), ('CDENTITEHY', 'str:8'),
                               ('TOPONYME', 'str:254'), ('distance', 'float')]
            }
            with fiona.open(filename, 'w', schema=schema, **options) as fst:
                # Receive features via send() until close() is called;
                # the `with` block then flushes and closes the dataset.
                while True:
                    feature = (yield)
                    fst.write(feature)

        sink = open_output_shapefile(output)
        # Prime the coroutine so it is paused at the first yield.
        next(sink)
        with click.progressbar(fs) as iterator:
            for feature in iterator:
                axis = feature['properties']['AXIS']
                toponyme = feature['properties']['TOPONYME']
                cdenthy = feature['properties']['CDENTITEHY']
                geometry = asShape(feature['geometry'])
                # One point per `params.distance` step from the line start.
                for dist in np.arange(0, geometry.length, params.distance):
                    point = geometry.interpolate(dist)
                    outfeature = {
                        'geometry': point.__geo_interface__,
                        'properties': {
                            'AXIS': axis,
                            'TOPONYME': toponyme,
                            'CDENTITEHY': cdenthy,
                            'distance': dist
                        }
                    }
                    sink.send(outfeature)
        # Closing the coroutine exits its `with`, closing the output file.
        sink.close()
def make_fragment_str(geojson_fragment, buffer=100):
    """Serialise a GeoJSON fragment, buffering LineStrings into polygons.

    LineStrings are widened by ``buffer`` (converted to degrees at the
    line's centroid latitude); other geometry types pass through
    unchanged. An EPSG:4326 crs member is stamped on before dumping.
    """
    if geojson_fragment['type'] == 'LineString':
        line = asShape(geojson_fragment)
        centroid_lat = line.centroid.y
        # Buffer distance in degrees varies with latitude; only the
        # north-south size is used, matching the original behaviour.
        _, dy = get_size_in_degrees(buffer, centroid_lat)
        geojson_fragment = line.buffer(dy).__geo_interface__
    geojson_fragment['crs'] = {"type": "name", "properties": {"name": "EPSG:4326"}}
    return json.dumps(geojson_fragment)
def test_junction_x(self):
    """Roads crossing in an X must merge into two simple linestrings.

    Four residential ways meeting at the tile centre should come out of
    the roads layer as exactly two features, each a plain two-point
    LineString with no extra vertex inserted at the crossing.
    """
    from tilequeue.tile import coord_to_bounds
    from shapely.geometry import LineString, asShape
    from ModestMaps.Core import Coordinate
    import dsl

    z, x, y = (12, 2048, 2048)

    # Tile bounds and centre point in lon/lat.
    minx, miny, maxx, maxy = coord_to_bounds(
        Coordinate(zoom=z, column=x, row=y))
    midx = 0.5 * (minx + maxx)
    midy = 0.5 * (miny + maxy)

    road_props = dict(
        highway='residential',
        source='openstreetmap.org',
    )

    # make a tile with 4 roads in an X shape, as below.
    #
    #  \   /
    #   1 2
    #    X
    #   3 4
    #  /   \
    #
    # these should get merged into two lines 1->4 & 2->3.
    self.generate_fixtures(
        dsl.way(1, LineString([[minx, maxy], [midx, midy]]), road_props),
        dsl.way(2, LineString([[maxx, maxy], [midx, midy]]), road_props),
        dsl.way(3, LineString([[minx, miny], [midx, midy]]), road_props),
        dsl.way(4, LineString([[maxx, miny], [midx, midy]]), road_props),
    )

    with self.features_in_tile_layer(z, x, y, 'roads') as features:
        # multilinestrings which contain lines which cross (as in the X
        # above) are "non-simple", and many geometry operations start by
        # forcing multilinestrings to be simple. we don't want this, as
        # it introduces an extra coordinate where the lines cross.
        # instead, we split into features which are individually simple,
        # which means we'll need 2 in this example.
        self.assertTrue(len(features) == 2)

        # when the test suite runs in "download only mode", an empty
        # set of features is passed into this block. the assertion
        # is shorted out, so we need this additional check which is
        # trivially satisfied in the case we're doing real testing.
        if len(features) == 2:
            for i in (0, 1):
                # the shapes should be single linestrings in this example.
                shape = asShape(features[i]['geometry'])
                self.assertTrue(shape.geom_type == 'LineString')
                # consisting of _only two_ points. (i.e: one didn't get
                # inserted into the middle)
                self.assertTrue(len(shape.coords) == 2)