def modify_df(self, df):
    """Adds a geometry column extracted from the other columns.

    Also fills ``self.dtypes`` with SQLAlchemy column types: every pandas
    ``object`` column maps to ``String``, and a ``geom`` point type is added
    for the file kinds that carry coordinates.
    """
    self.dtypes = {}
    # Map all pandas "object" columns to a SQLAlchemy String type.
    # bug fix: the original compared against type(object) (which is `type`),
    # so the condition never matched and no String mapping was registered.
    for ky, typ in df.dtypes.items():
        if typ == object:
            self.dtypes[ky] = String
    if re.search('A-GLACIER.csv', self.pdfile):
        # extract longitude and latitude (2-D points, elevation forced to 0)
        self.dtypes["geom"] = geoPointtype
        df.LONGITUDE = df.LONGITUDE.fillna(0)
        df.LATITUDE = df.LATITUDE.fillna(0)
        df['geom'] = [WKTElement(wktdump(Point(lon, lat, 0)), srid=4326, extended=True)
                      for lon, lat in zip(df.LONGITUDE, df.LATITUDE)]
    if re.search('EEE-MASS-BALANCE-POINT.csv', self.pdfile):
        # extract longitude, latitude and elevation (3-D points)
        df.POINT_LON = df.POINT_LON.fillna(0)
        df.POINT_LAT = df.POINT_LAT.fillna(0)
        df.POINT_ELEVATION = df.POINT_ELEVATION.fillna(0)
        df['geom'] = [WKTElement(wktdump(Point(lon, lat, h)), srid=4326, extended=True)
                      for lon, lat, h in zip(df.POINT_LON, df.POINT_LAT, df.POINT_ELEVATION)]
        self.dtypes["geom"] = geoPointtype
    return df
def items(db_session):
    """Seed *db_session* with three sample Item rows and commit them."""
    camera = Item(
        item_name='Camera Nikon',
        lat=51.5200005,
        lng=-0.0955810994,  # London
        item_url='london/camera',
        img_urls='[camera.jpg]',
        geom=WKTElement('POINT({} {})'.format(-0.0955810994, 51.5200005)))
    db_session.add(camera)
    camera_lenses = Item(
        item_name='Camera Lenses Canon',
        lat=51.752022,
        lng=-1.257677,  # Oxford
        item_url='oxford/camera-lenses',
        img_urls='[camera-lenses.jpg]',
        geom=WKTElement('POINT({} {})'.format(-1.257677, 51.752022)))
    db_session.add(camera_lenses)
    lenses = Item(
        item_name='Lenses Canon',
        lat=34.152231,
        lng=-118.22321,  # LA
        item_url='la/lenses',
        img_urls='[lenses.jpg]',
        # bug fix: geom previously used coordinates (-118.243683 34.052235)
        # that disagreed with this row's own lat/lng fields.
        geom=WKTElement('POINT({} {})'.format(-118.22321, 34.152231)))
    db_session.add(lenses)
    db_session.commit()
def uploadGDFtoPostGIS(gdf, tableName):
    """Upload a GeoDataFrame to a table in AWS RDS (PostGIS).

    Handles combined Polygon/MultiPolygon geometry: each kind is loaded into
    a temporary table, plain polygons are promoted with ST_Multi, and both
    temp tables are unioned into the final table.

    NOTE(review): tableName is interpolated directly into SQL; it must come
    from trusted code, never from user input.

    Returns the final table read back as a GeoDataFrame.
    """
    gdf2 = gdf.copy()
    gdf2["type"] = gdf2.geometry.geom_type
    gdfPolygon2 = gdf2.loc[gdf2["type"] == "Polygon"].copy()
    gdfMultiPolygon2 = gdf2.loc[gdf2["type"] == "MultiPolygon"].copy()
    for frame in (gdfPolygon2, gdfMultiPolygon2):
        frame['geom'] = frame['geometry'].apply(lambda x: WKTElement(x.wkt, srid=4326))
        # drop helper columns; the positional `axis` argument to drop() was
        # removed in pandas 2.0, so use the columns= keyword
        frame.drop(columns=["geometry", "type"], inplace=True)
    gdfPolygon2.to_sql("temppolygon", engine, if_exists='replace', index=False,
                       dtype={'geom': Geometry('POLYGON', srid=4326)})
    gdfMultiPolygon2.to_sql("tempmultipolygon", engine, if_exists='replace', index=False,
                            dtype={'geom': Geometry('MULTIPOLYGON', srid=4326)})
    # promote plain polygons so both temp tables share one geometry type
    sql = "ALTER TABLE temppolygon ALTER COLUMN geom type geometry(MultiPolygon, 4326) using ST_Multi(geom);"
    result = connection.execute(sql)
    sql = "CREATE TABLE %s AS (SELECT * FROM temppolygon UNION SELECT * FROM tempmultipolygon);" % (tableName)
    result = connection.execute(sql)
    sql = "DROP TABLE temppolygon,tempmultipolygon"
    result = connection.execute(sql)
    sql = "select * from %s" % (tableName)
    gdfFromSQL = gpd.GeoDataFrame.from_postgis(sql, connection, geom_col='geom')
    return gdfFromSQL
def setup_method(self, _):
    """Insert three public FullTextSearch fixture rows and commit."""
    import transaction
    from sqlalchemy import func
    from geoalchemy2 import WKTElement
    from c2cgeoportal_commons.models import DBSession
    from c2cgeoportal_commons.models.main import FullTextSearch

    entry1 = FullTextSearch()
    entry1.label = "label 1"
    entry1.layer_name = "layer1"
    entry1.ts = func.to_tsvector("french", "soleil travail")
    entry1.the_geom = WKTElement("POINT(-90 -45)", 21781)
    entry1.public = True

    entry2 = FullTextSearch()
    entry2.label = "label 2"
    entry2.layer_name = "layer2"
    entry2.ts = func.to_tsvector("french", "pluie semaine")
    entry2.the_geom = WKTElement("POINT(-90 -45)", 21781)
    # bug fix: the original re-assigned entry1.public here (copy-paste),
    # leaving entry2 and entry3 with .public never set.
    entry2.public = True

    entry3 = FullTextSearch()
    entry3.label = "label 3"
    entry3.layer_name = "layer2"
    entry3.ts = func.to_tsvector("french", "vent neige")
    entry3.the_geom = WKTElement("POINT(-90 -45)", 21781)
    entry3.public = True

    DBSession.add_all([entry1, entry2, entry3])
    transaction.commit()
def test_Raster(self):
    # Raster support requires PostGIS 2.x.
    if not postgis_version.startswith('2.'):
        raise SkipTest
    from geoalchemy2 import WKTElement, RasterElement

    # Rasterize a triangle covering the upper-left half of the unit square.
    triangle = WKTElement('POLYGON((0 0,1 1,0 1,0 0))', srid=4326)
    ocean = Ocean(triangle.ST_AsRaster(5, 5))
    session.add(ocean)
    session.flush()
    session.expire(ocean)

    ok_(isinstance(ocean.rast, RasterElement))

    def scalar(expr):
        # Evaluate a raster expression server-side and fetch the scalar.
        return session.execute(expr).scalar()

    eq_(scalar(ocean.rast.ST_Height()), 5)
    eq_(scalar(ocean.rast.ST_Width()), 5)
    # The top left corner is covered by the polygon
    eq_(scalar(ocean.rast.ST_Value(WKTElement('Point(0 1)', srid=4326))), 1)
    # The bottom right corner has NODATA
    eq_(scalar(ocean.rast.ST_Value(WKTElement('Point(1 0)', srid=4326))), None)
def decide_geometric_operation(self, srid):
    """ Decides the simple cases of geometric filter operations.

    Args:
        srid (int): The SRID/EPSG number to define the coordinate system of the geometry
            attribute.

    Returns:
        sqlalchemy.sql.expression._BinaryExpression: The clause element

    Raises:
        HTTPBadRequest
    """
    # Build the comparison geometry once for every operator.
    geom = WKTElement(self.value, srid=srid)
    if self.operator == 'INTERSECTS':
        clause = self.column.ST_Intersects(geom)
    elif self.operator == 'TOUCHES':
        clause = self.column.ST_Touches(geom)
    elif self.operator == 'COVERED_BY':
        clause = self.column.ST_CoveredBy(geom)
    elif self.operator == 'WITHIN':
        # bug fix: ST_DFullyWithin requires a distance argument and would
        # generate invalid SQL when called with a geometry only; the WITHIN
        # operator maps to ST_Within.
        clause = self.column.ST_Within(geom)
    else:
        raise HTTPBadRequest(
            'The operator "{operator}" you passed is not implemented.'.
            format(operator=self.operator))
    return clause
def interactedusers():
    """POST endpoint: upsert the caller's interactions with other users.

    For each entry in ``listOfInteractedUsers``, either refresh the stored
    location and timestamp of an existing interaction row, or insert a new
    one, then commit.
    """
    # TODO Check if the users were at the same point within 3 seconds
    # making interacted_id and person_id the
    if request.method == 'POST':
        print('inside post of interactedusers')
        request_data = request.get_json(force=True)
        main_person_id = get_jwt_identity()['id']
        # Iterate the payload entries directly instead of range(len(...)).
        for interaction in request_data['listOfInteractedUsers']:
            print(interaction['lat'])
            temp_lat = Decimal(interaction['lat'])
            temp_lon = Decimal(interaction['lng'])
            temp_interacted_id = Decimal(interaction['interacted_id'])
            # Build the geometry once -- both branches store the same point.
            point_wkt = WKTElement('SRID=4326;POINT({} {})'.format(temp_lon, temp_lat), srid=4326)
            temp_user = db.session.query(InteractedUsers) \
                .filter(InteractedUsers.person_id == main_person_id) \
                .filter(InteractedUsers.interacted_id == temp_interacted_id).first()
            if temp_user:
                print('this is in interaction, update the location and timestamp')
                temp_user.at_location = point_wkt
                temp_user.at_time = dt.datetime.utcnow()
                db.session.add(temp_user)
            else:
                print('not present so entering into db')
                db.session.add(InteractedUsers(point_wkt, main_person_id, temp_interacted_id))
        db.session.commit()
        print('pushed all the interacted user points to the database')
        return jsonify({'hello': 'world'})
def setUp(self):  # noqa
    """Create two user/role pairs and six full-text-search fixtures."""
    import transaction
    from sqlalchemy import func
    from geoalchemy2 import WKTElement
    from c2cgeoportal.models import FullTextSearch, User, Role, Interface
    from c2cgeoportal.models import DBSession

    role_a = Role(name=u"__test_role1", description=u"__test_role1")
    account_a = User(username=u"__test_user1", password=u"__test_user1")
    account_a.role_name = role_a.name

    role_b = Role(name=u"__test_role2", description=u"__test_role2")
    account_b = User(username=u"__test_user2", password=u"__test_user2")
    account_b.role_name = role_b.name

    shared_geom = WKTElement("POINT(-90 -45)", 21781)

    def make_entry(label, words, layer_name=None, public=True, with_geom=True):
        # Build one FullTextSearch row; all geometric rows share one point.
        item = FullTextSearch()
        item.label = label
        if layer_name is not None:
            item.layer_name = layer_name
        item.ts = func.to_tsvector("french", words)
        if with_geom:
            item.the_geom = shared_geom
        item.public = public
        return item

    hit1 = make_entry("label1", "soleil travail", layer_name="layer1")
    hit2 = make_entry("label2", "pluie semaine", layer_name="layer2", public=False)
    hit3 = make_entry("label3", "vent neige", layer_name="layer3", public=False)
    hit3.role = role_b
    hit4 = make_entry("label4", "soleil travail", layer_name="layer1")
    hit5 = make_entry("label5", "params", with_geom=False)
    hit5.params = {"floor": 5}
    hit5.actions = [{"action": "add_layer", "data": "layer1"}]
    hit6 = make_entry("label6", "params", with_geom=False)
    hit6.interface = Interface("main")

    DBSession.add_all([account_a, account_b, role_a, role_b,
                       hit1, hit2, hit3, hit4, hit5, hit6])
    transaction.commit()
def setup_method(self, _):
    # Always see the diff
    # https://docs.python.org/2/library/unittest.html#unittest.TestCase.maxDiff
    self.maxDiff = None
    from c2cgeoportal_commons.models import DBSession
    from c2cgeoportal_commons.models.main import Interface, LayerWMS, RestrictionArea, Role
    from c2cgeoportal_commons.models.static import User

    setup_db()

    role_one = Role(name="__test_role1", description="__test_role1")
    member_one = User(username="******", password="******")
    member_one.roles = [role_one]
    member_one.email = "Tarenpion"

    role_two = Role(name="__test_role2", description="__test_role2")
    member_two = User(username="******", password="******")
    member_two.roles = [role_two]
    member_two.email = "Tarenpion"

    server = create_default_ogcserver()
    ui = Interface(name="main")

    def wms_layer(name):
        # Private WMS layer bound to the internal OGC server / main interface.
        lyr = LayerWMS(name, public=False)
        lyr.layer = name
        lyr.ogc_server = server
        lyr.interfaces = [ui]
        return lyr

    layer_a = wms_layer("layer_1")
    layer_b = wms_layer("layer_2")
    layer_c = wms_layer("layer_3")

    zone_one = WKTElement("POLYGON((-100 30, -100 50, 100 50, 100 30, -100 30))", srid=21781)
    ra_one = RestrictionArea("__test_ra1", "", [layer_a, layer_b], [role_one],
                             zone_one, readwrite=True)

    zone_two = WKTElement("POLYGON((-100 0, -100 20, 100 20, 100 0, -100 0))", srid=21781)
    ra_two = RestrictionArea("__test_ra2", "", [layer_a, layer_b, layer_c], [role_two],
                             zone_two)

    DBSession.add_all([member_one, member_two, role_one, role_two, ra_one, ra_two])
    transaction.commit()
def test_data(dbsession, transact):
    """Fixture: seed four PointTest rows and one LayerVectorTiles layer,
    then yield them to the test.

    The *transact* fixture is only needed for its side effect (presumably
    transaction handling/rollback — confirm against the fixture definition),
    so it is deleted immediately to silence unused-argument warnings.
    """
    from c2cgeoportal_commons.models.main import LayerVectorTiles
    del transact
    # Four points forming a square around (600000, 200000) in EPSG:21781,
    # with names/cities covering ASCII, accented and numeric strings.
    points = {
        "p1": PointTest(geom=WKTElement("POINT(599910 199955)", srid=21781),
                        name="foo", city="Lausanne", country="Swiss"),
        "p2": PointTest(geom=WKTElement("POINT(599910 200045)", srid=21781),
                        name="bar", city="Chambéry", country="France"),
        "p3": PointTest(geom=WKTElement("POINT(600090 200045)", srid=21781),
                        name="éàè", city="Paris", country="France"),
        "p4": PointTest(geom=WKTElement("POINT(600090 199955)", srid=21781),
                        name="123", city="Londre", country="UK"),
    }
    dbsession.add_all(points.values())
    # One vector-tiles layer whose SQL serves the points as MVT; the
    # {envelope} placeholder is substituted later (by the tile service,
    # not here).
    layers = {
        "layer_vector_tiles": LayerVectorTiles(
            name="layer_vector_tiles",
            style="https://example.com/style.json",
            sql=""" SELECT ST_AsMVT(q, 'mvt_routes') FROM ( SELECT name, city, country, ST_AsMVTGeom("geom", {envelope}) as geom FROM geodata.testpoint WHERE ST_Intersects("geom", {envelope}) ) AS q """,
        )
    }
    dbsession.add_all(layers.values())
    dbsession.flush()
    yield {
        "layers": layers,
        "points": points,
    }
class DbTestCase(unittest.TestCase):
    """Round-trip insert/delete tests for Driver, Itinerary and TruckPosition."""

    # Class-level fixture rows shared by every test method.
    driver = Driver(
        firstname='Robin',
        lastname='Ambert'
    )
    depart = 'POINT(-126.4 45.32)'
    arrivee = 'POINT(-124.4 42.32)'
    itinerary = Itinerary(
        mission="Transport",
        departure=WKTElement(depart, srid=4326),
        arrival=WKTElement(arrivee, srid=4326)
    )
    position = 'POINT(-126.4 45.32)'
    # NOTE(review): srid=4269 (NAD83) differs from the 4326 used for the
    # itinerary above — confirm this mismatch is intentional.
    truck_position = TruckPosition(
        iddriver=2,
        idtruck=1,
        status="test",
        iditinerary=2,
        timestamp=datetime.now(),
        position=WKTElement(position, srid=4269)
    )

    def setUp(self):
        # Insert each fixture with its own commit.
        session.add(DbTestCase.driver)
        session.commit()
        session.add(DbTestCase.itinerary)
        session.commit()
        session.add(DbTestCase.truck_position)
        session.commit()

    def tearDown(self):
        # NOTE(review): driver/itinerary are deleted before the truck
        # position that references them by id — confirm no FK constraint
        # blocks this ordering.
        session.delete(DbTestCase.driver)
        session.commit()
        session.delete(DbTestCase.itinerary)
        session.commit()
        session.delete(DbTestCase.truck_position)
        session.commit()

    def test_insertions(self):
        # Each fixture must be retrievable by its distinguishing columns.
        q_driver = session.query(Driver).filter_by(
            firstname = 'Robin',
            lastname = 'Ambert')
        self.assertIsNotNone(q_driver.first())
        q_itinerary = session.query(Itinerary).filter_by(
            mission = 'Transport')
        self.assertIsNotNone(q_itinerary.first())
        q_truckposition = session.query(TruckPosition).filter_by(
            status = 'test')
        self.assertIsNotNone(q_truckposition.first())
def graph_nodes_to_db(G, schema, table_name, host, database, user, password, port=5432):
    """Save the nodes of graph *G* into ``<schema>.<table_name>`` (PostGIS).

    Skips the upload when the table already exists.

    NOTE(review): schema/table_name are interpolated into SQL; they must come
    from trusted configuration, never from user input.
    """
    # check if table already in database
    conn = psycopg2.connect(host=host, database=database, user=user, password=password)
    try:
        c = conn.cursor()
        sql = "SELECT to_regclass('{}.{}');".format(schema, table_name)
        c.execute(sql)
        table_exists = c.fetchall()[0][0] is not None
    finally:
        # bug fix: the connection was never closed
        conn.close()
    # if table exist do nothing
    if table_exists:
        print('nodes already in database')
        return
    # if nodes are not in database, save using geopandas
    print('saving {} to database'.format(table_name))
    nodes = gpd.GeoDataFrame(dict(G.nodes(data=True))).T
    nodes.geometry = [Point(node['x'], node['y']) for i, node in nodes.iterrows()]
    # convert geometry to well known text format
    nodes['geom'] = nodes['geometry'].apply(lambda x: WKTElement(x.wkt, srid=27700))
    # drop the geometry column as it is now duplicative; the positional
    # `axis` argument to drop() was removed in pandas 2.0
    nodes.drop(columns=['geometry'], inplace=True)
    engine = create_engine('postgresql://{user}:{password}@{host}:{port}/{database}'.format(
        user=user, password=password, host=host, database=database, port=port))
    nodes.to_sql(table_name, engine, if_exists='replace', index=False,
                 dtype={'geom': Geometry('POINT', srid=27700)})
def transform(flist, maxWorkers=5, spatialFilter=None):
    """Parse every file in *flist* in parallel, clip the merged points to a
    region (the supplied filter, or the buffered world landmass), and return
    the result with geometry serialized into a WKT 'geom' column."""
    with ThreadPoolExecutor(max_workers=maxWorkers) as pool:
        parsed = list(pool.map(lambda fname: parseFile(fname), flist))
    combined = gpd.GeoDataFrame(pd.concat(parsed, ignore_index=True))

    if spatialFilter is None:
        # Default clip region: dissolved world landmass, slightly buffered.
        land = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))
        land['code'] = 1
        clipRegion = gpd.GeoDataFrame(land.dissolve(by='code').buffer(0.075))
        clipRegion.columns = ['geometry']
        clipRegion.crs = {'init': 'epsg:4326'}
    else:
        clipRegion = gpd.read_file(spatialFilter)

    with ThreadPoolExecutor(max_workers=maxWorkers) as pool:
        selected = list(pool.map(lambda region: spatialSelect(combined, region),
                                 clipRegion.geometry))
    kept = gpd.GeoDataFrame(pd.concat(selected, ignore_index=True))
    kept['geom'] = kept['geometry'].apply(lambda shape: WKTElement(shape.wkt, srid=4326))
    return kept.drop('geometry', axis=1)
def get_indices(directory):
    """
    Grabs indices from bayleef package and returns a record dictionary, only
    really used for creating tables being uploaded to Postgres.

    Parameters
    ----------
    directory : str
        Folder with the bayleef package

    Returns
    -------
    : dict
        Dictionary with row data
    """
    index_file = path.join(directory, 'index.json')
    with open(index_file, 'r') as fh:
        indices = json.load(fh)
    data = {}
    for key, value in indices.items():
        if "time" in key:
            # time entries are stored as datetime kwargs, e.g. {"year": ...}
            data[key] = datetime(**value)
        # bug fix: this was a second `if`, so the trailing `else` overwrote
        # every already-converted "time" entry with its raw value
        elif "geom" in key:
            data[key] = WKTElement(value)
        else:
            data[key] = value
    return data
def google_location(loc):
    """Geocode *loc* (appended with ', London UK') via the Google geocoder.

    Returns a dict with address, coordinates, neighborhood and postcode
    information, or None when the lookup fails.
    """
    time.sleep(0.5)  # crude rate limiting
    gg = gcd.google(loc + ', London UK')
    if gg.status == 'OK':
        pc = gg.postal
        # Full postcodes ("SW1A 1AA") keep only the outward area code.
        if len(pc) > 4:
            ac = pc[:pc.find(' ')]
        else:
            ac = pc
        return {
            'address': gg.address,
            'lng': gg.lng,
            'lat': gg.lat,
            'hood': gg.neighborhood,
            'my_hood': areacode_to_hood(ac),
            # bug fix: WKT points are "POINT(lon lat)"; the original emitted
            # "POINT(lat lon)".
            'the_geom': WKTElement('POINT(' + str(gg.lng) + ' ' + str(gg.lat) + ')', srid=4326),
            'post_code': pc,
            'area_code': ac
        }
    # bug fixes: the original compared strings with `is` (identity, fragile)
    # and looped forever on any status other than OK / ZERO_RESULTS /
    # OVER_QUERY_LIMIT.  Any non-OK status now yields None.
    return None
def sendFileToPostGIS(vector_file, password, access_rights):
    """Upload a vector file into the UAL PostGIS server.

    The table is named after the file (lower-cased, extension stripped) and
    created in the *access_rights* schema.

    Returns the EPSG code of the uploaded layer.
    Raises ValueError when the EPSG code cannot be determined.
    """
    print(f"Reading in file {vector_file}...")
    df = gpd.read_file(vector_file)
    # CREATE POSTGRESQL TABLE NAME. SAME AS FILE WITHOUT EXTENSION (LOWERCASE IMPORTANT)
    table_name = os.path.basename(vector_file).split(".")[0].lower()
    # GET GEOMETRY TYPE IN UPPERCASE
    geom_type = df.geometry.iloc[0].geom_type.upper()
    # GET EPSG CODE
    try:
        epsg_code = int(df.crs.get('init').split(":")[1])
    except Exception as err:
        # bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt
        print(
            "Unable to get get epsg code of geodataframe: Found CRS : {}\nExiting."
            .format(df.crs))
        raise ValueError from err
    df['geom'] = df['geometry'].apply(
        lambda x: WKTElement(x.wkt, srid=epsg_code))
    # the positional `axis` argument to drop() was removed in pandas 2.0
    df.drop(columns=["geometry"], inplace=True)
    print("writing to postgres as table {}...".format(table_name))
    engine = create_engine('postgresql://*****:*****@geo.library.arizona.edu:5440/UAL_geoData')
    df.to_sql(table_name.lower(), engine, schema=access_rights, if_exists='replace',
              index=True, index_label="OBJECTID",
              dtype={"geom": Geometry(geom_type, srid=epsg_code)})
    return epsg_code
def ImportFromDataframe(engine, dataframe, table_name, schema=None, if_exists='append', srid=-1):
    """Write *dataframe* to ``<schema>.<table_name>`` via SQLAlchemy.

    3 choices for the "if_exists" arg: fail, replace, append.

    When the frame is a GeoDataFrame, its geometry column is converted to a
    WKT 'geom' column typed as a generic PostGIS Geometry.

    NOTE: mutates *dataframe* in place (adds 'geom', drops the source
    geometry column).
    """
    if is_geodataframe(dataframe):
        geometry_col = find_geometry_column(dataframe)
        dataframe['geom'] = dataframe[geometry_col].apply(
            lambda x: WKTElement(x.wkt, srid=srid))
        # drop the geometry column as it is now duplicative; the positional
        # `axis` argument to drop() was removed in pandas 2.0
        dataframe.drop(columns=[geometry_col], inplace=True)
        dtype = {'geom': Geometry('GEOMETRY', srid=srid)}
    else:
        dtype = None
    dataframe.to_sql(table_name, engine, schema=schema, if_exists=if_exists,
                     index=False, dtype=dtype)
def save_scores_to_db(scores_df: pd.DataFrame, table_name: str, experiment: str, train_hash: str):
    """
    Persist per-model building probabilities into a Postgres table.

    Args:
        scores_df: the scores data, with geometries as indices, and models as columns
        table_name: name of the table to insert the data into (lower-cased)
        experiment: name of the experiment resulted in those probas
        train_hash: hash of the train geometries - used for kfolds

    Returns:
        None
    """
    destination = table_name.lower()
    engine = get_connection(db_name='POSTGRES')
    for model_name in scores_df.columns:
        rows = pd.DataFrame(data={
            MODIFICATION_DATE: datetime.datetime.now(),
            GEOM: scores_df.index,
            SCORES: scores_df[model_name],
            MODEL: model_name,
            EXPERIMENT: experiment,
            TRAIN_HASH: train_hash,
        }).reset_index(drop=True)
        rows[GEOM_WKT] = rows[GEOM].apply(lambda g: g.wkt)
        # deterministic hash of the WKT text, used as part of the merge key
        rows[GEOM_WKT_HASH] = [str(h) for h in pd.util.hash_pandas_object(rows[GEOM_WKT])]
        rows[GEOM] = rows[GEOM].apply(lambda g: WKTElement(g.wkt, srid=4326))
        merge_to_table(engine, rows, destination,
                       compare_columns=[GEOM_WKT_HASH, EXPERIMENT, MODEL, TRAIN_HASH],
                       update_columns=[MODIFICATION_DATE, SCORES, GEOM, GEOM_WKT],
                       dtypes=DTYPES)
    engine.dispose()
def setup_method(self, _):
    """Create one role/user pair, two protected layers and one restriction area."""
    from c2cgeoportal_commons.models import DBSession
    from c2cgeoportal_commons.models.main import Role, LayerWMS, RestrictionArea, Interface
    from c2cgeoportal_commons.models.static import User

    server = create_default_ogcserver()

    test_role = Role(name="__test_role1", description="__test_role1")
    account = User(username="******", password="******",
                   settings_role=test_role, roles=[test_role])
    account.email = "Tarenpion"

    ui = Interface(name="main")

    def protected_layer(name, wms_layer):
        # Non-public WMS layer on the internal server / main interface.
        lyr = LayerWMS(name, public=False)
        lyr.layer = wms_layer
        lyr.ogc_server = server
        lyr.interfaces = [ui]
        return lyr

    group_layer = protected_layer("testpoint_group_name", "testpoint_group")
    second_layer = protected_layer("testpoint_protected_2_name", "testpoint_protected_2")

    zone = WKTElement("POLYGON((-100 30, -100 50, 100 50, 100 30, -100 30))", srid=21781)
    area1 = RestrictionArea("__test_ra1", "", [group_layer, second_layer], [test_role], zone)

    DBSession.add_all([account, test_role, group_layer, second_layer, area1])
    DBSession.flush()
    transaction.commit()
def lookup_coordinates(session):
    """Render the coordinate-lookup page; on a valid POST, list the EEZ
    zones and countries containing the submitted point."""
    form = LatLongForm(request.form)
    # Guard clause: anything but a validated POST renders the empty page.
    if request.method != 'POST' or not form.validate():
        return render_template('ocean/lookup_coordinates.html', form=form,
                               data=None)

    lon = form.longitude.data
    lat = form.latitude.data
    current_point = WKTElement('POINT(%s %s)' % (lon, lat), srid=4326)

    zone_rows = (session
                 .query(EEZ12.geoname)
                 .filter(func.ST_Within(current_point, EEZ12.geom))
                 .order_by(EEZ12.geoname)
                 .all())
    country_rows = (session
                    .query(WorldBorders.name)
                    .filter(func.ST_Within(current_point, WorldBorders.geom))
                    .order_by(WorldBorders.name)
                    .all())

    payload = {
        'input_lat': lat,
        'input_lon': lon,
        'zones': [row.geoname for row in zone_rows],
        'countries': [row.name for row in country_rows],
    }
    return render_template('ocean/lookup_coordinates.html', form=form,
                           data=payload)
def as_postgis(self, area):
    """Serialize *area* as a WKTElement in this geometry's SRID, after
    rounding and (optionally) enforcing the configured winding order."""
    ring = self.rec_round(area)
    if self.clockwise is not None:
        ring = self.as_directed(ring, self.clockwise)
    coords = ",".join("%s %s" % (pt[0], pt[1]) for pt in ring)
    wkt = "%s((%s))" % (self.geometry_type, coords)
    return WKTElement(wkt, srid=self.srid)
def get_missions(lat, lon, radius, limit, lang, user_id):
    """Return up to *limit* unsolved error "missions" near (lat, lon).

    Candidates are gathered in nearest-centroid order, filtered to *radius*
    (ST_DistanceSphere), capped per error type, and serialized with
    ``dump(lang)``.  On any failure the error is logged and an empty list is
    returned.
    """
    try:
        location = WKTElement('POINT(' + str(lon) + ' ' + str(lat) + ')', srid=4326)
        no_of_errors = 10
        # get already solved error ids
        already_solved = db_session.query(api.models.Solution.error_id). \
            filter(api.models.Solution.user_id == user_id)
        # get nearest neighbors candidates from location
        q = db_session.query(api.models.kort_errors.schema, api.models.kort_errors.errorId) \
            .filter((~api.models.kort_errors.errorId.in_(already_solved))) \
            .order_by(api.models.kort_errors.geom.distance_centroid(location)) \
            .limit(limit*no_of_errors).subquery()
        # partition by error type
        q = db_session.query(api.models.kort_errors, func.row_number().over(
            partition_by=api.models.kort_errors.error_type).label("row_number")) \
            .filter(tuple_(api.models.kort_errors.schema, api.models.kort_errors.errorId).in_(q)) \
            .filter(func.ST_DistanceSphere(api.models.kort_errors.geom, location) < radius).subquery()
        # set max errors of each type
        q = db_session.query(
            api.models.kort_errors).select_entity_from(q).filter(
            q.c.row_number <= limit / no_of_errors)
    except Exception:
        logger.error(traceback.format_exc())
        # bug fix: the original fell through and hit a NameError on `q`;
        # fail soft with an empty result instead.
        return []
    return [p.dump(lang) for p in q][:limit]
def save_geo_series_to_tmp_table(geo_series: gpd.GeoSeries, eng: sa.engine.Engine) -> str:
    """
    Persist a GeoSeries into a temporary geography table (with a spatial
    index) so later queries can run against the db instead of in memory.

    Args:
        geo_series: The GeoSeries to be inserted into a db table
        eng: SQL Alchemy engine

    Returns:
        The name of the new table
    """
    frame = gpd.GeoDataFrame(geo_series.rename('geom'), columns=['geom'], geometry='geom')
    frame['geom'] = frame.geometry.apply(lambda shape: WKTElement(shape.wkt, srid=4326))
    # sequential id used by callers to join rows back to the input order
    frame['geom_id'] = range(len(frame))
    table = get_temp_table_name()
    insert_into_table(eng, frame, table,
                      dtypes={'geom': Geography(srid=4326), 'geom_id': sa.INT})
    add_postgis_index(eng, table, 'geom')
    return table
def setup_method(self, _):
    # Always see the diff
    # https://docs.python.org/2/library/unittest.html#unittest.TestCase.maxDiff
    self.maxDiff = None  # pylint: disable=invalid-name
    self._tables = []
    from c2cgeoportal_commons.models import DBSession
    from c2cgeoportal_commons.models.main import Role
    from c2cgeoportal_commons.models.static import User

    setup_db()

    # First role has no extent restriction.
    plain_role = Role(name="__test_role1")
    first_user = User(username="******", password="******",
                      settings_role=plain_role, roles=[plain_role])
    first_user.email = "*****@*****.**"

    # Second role is limited to a rectangular extent.
    boxed_role = Role(
        name="__test_role2",
        extent=WKTElement("POLYGON((1 2, 1 4, 3 4, 3 2, 1 2))", srid=21781))
    second_user = User(username="******", password="******",
                       settings_role=boxed_role, roles=[boxed_role])

    DBSession.add_all([first_user, second_user])
    DBSession.flush()
    self.role1_id = plain_role.id
    transaction.commit()
def transform(flist, maxWorkers=5, spatialFilter=None):
    """Parse the files of *flist* in parallel, clip the merged points to a
    region (the supplied filter, or the buffered world landmass) using the
    spatial index, and return the result with a WKT 'geom' column."""
    with ThreadPoolExecutor(max_workers=maxWorkers) as pool:
        frames = list(pool.map(lambda fname: parseFile(fname), flist))
    merged = gpd.GeoDataFrame(pd.concat(frames, ignore_index=True))

    if spatialFilter is None:
        # Default clip region: dissolved world landmass, slightly buffered.
        land = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))
        land['code'] = 1
        clipRegion = gpd.GeoDataFrame(land.dissolve(by='code').buffer(0.075))
        clipRegion.columns = ['geometry']
        clipRegion.crs = {'init': 'epsg:4326'}
    else:
        clipRegion = gpd.read_file(spatialFilter)

    # Candidate lookup through the R-tree, then an exact intersects test.
    spatial_index = merged.sindex
    hits = []
    for _, region in clipRegion.iterrows():
        candidate_idx = list(spatial_index.intersection(region.geometry.bounds))
        candidates = merged.iloc[candidate_idx]
        hits.append(candidates[candidates.intersects(region.geometry)])

    keepPts = gpd.GeoDataFrame(pd.concat(hits, ignore_index=True))
    keepPts['geom'] = keepPts['geometry'].apply(lambda shape: WKTElement(shape.wkt, srid=4326))
    return keepPts.drop('geometry', axis=1)
def test_insert(self):
    """Insert two lakes via DBAPI executemany (exercising the Geometry
    type's bind_processor and bind_expression) and verify that both
    round-trip as WKBElement values carrying SRID 4326."""
    from geoalchemy2 import WKTElement, WKBElement
    conn = engine.connect()
    # One EWKT string bind value and one WKTElement bind value.
    # (WKBElement bind values are not supported.)
    conn.execute(Lake.__table__.insert(), [
        {'geom': 'SRID=4326;LINESTRING(0 0,1 1)'},
        {'geom': WKTElement('LINESTRING(0 0,2 2)', srid=4326)}
    ])
    rows = conn.execute(Lake.__table__.select()).fetchall()
    expected = ['LINESTRING(0 0,1 1)', 'LINESTRING(0 0,2 2)']
    for row, wkt_text in zip(rows, expected):
        ok_(isinstance(row[1], WKBElement))
        eq_(session.execute(row[1].ST_AsText()).scalar(), wkt_text)
        eq_(session.execute(row[1].ST_SRID()).scalar(), 4326)
def wkt_transformer(self, dataframe):
    """Replace *dataframe*'s 'geometry' column in place with a WKT 'geom'
    column in this object's SRID, and return the same frame."""
    dataframe['geom'] = dataframe['geometry'].map(
        lambda shape: WKTElement(shape.wkt, srid=self.srid))
    dataframe.drop(columns=['geometry'], inplace=True)
    return dataframe
def setUp(self):  # noqa
    """Create one user/role pair, two protected LayerV1 layers and a
    restriction area granting the role access to both."""
    from c2cgeoportal.models import User, Role, LayerV1, RestrictionArea, \
        Interface, DBSession

    create_default_ogcserver()

    test_role = Role(name=u"__test_role1", description=u"__test_role1")
    account = User(username=u"__test_user1", password=u"__test_user1")
    account.role_name = test_role.name
    account.email = u"Tarenpion"

    ui = Interface(name=u"main")

    # Two non-public layers exposed on the main interface.
    first_layer = LayerV1(u"testpoint_group", public=False)
    first_layer.interfaces = [ui]
    second_layer = LayerV1(u"testpoint_protected_2", public=False)
    second_layer.interfaces = [ui]

    zone = WKTElement("POLYGON((-100 30, -100 50, 100 50, 100 30, -100 30))", srid=21781)
    area1 = RestrictionArea(u"__test_ra1", u"", [first_layer, second_layer],
                            [test_role], zone)

    DBSession.add_all([account, test_role, first_layer, second_layer, area1])
    DBSession.flush()
    transaction.commit()
def loadshp(shpInput, connstr, lyrName, schema):
    """Load a shapefile into the PostGIS table ``<schema>.<lyrName>``.

    The layer's EPSG code and geometry type are detected from the file and
    used to type the 'geom' column.
    """
    # Load data in geodataframe
    geodataframe = geopandas.read_file(shpInput)
    # Identify CRS, e.g. "epsg:4326"
    crs_name = str(geodataframe.crs.srs)
    # (dead `if epsg is None` check removed: int() either raises or
    # returns an int, never None)
    epsg = int(crs_name.replace('epsg:', ''))
    # Identify Geometry type
    geom_type = geodataframe.geom_type[0]
    # Creating SQLAlchemy's engine to use
    engine = create_engine(connstr)
    geodataframe['geom'] = geodataframe['geometry'].apply(
        lambda x: WKTElement(x.wkt, srid=epsg))
    # drop the geometry column as it is now duplicative; the positional
    # `axis` argument to drop() was removed in pandas 2.0
    geodataframe.drop(columns=['geometry'], inplace=True)
    # Use 'dtype' to specify column's type
    # For the geom column, we will use GeoAlchemy's type 'Geometry'
    # bug fix: the schema parameter was ignored and 'tekson' hard-coded.
    geodataframe.to_sql(lyrName, engine, schema=schema, if_exists='append',
                        index=False, dtype={'geom': Geometry(geom_type, srid=epsg)})
def get_indices(directory):
    """
    Grabs indices from bayleef package and returns a record dictionary, only
    really used for creating tables being uploaded to Postgres.

    Parameters
    ----------
    directory : str
        Folder with the bayleef package

    Returns
    -------
    : dict
        Dictionary with row data
    """
    with open(path.join(directory, 'index.json'), 'r') as fh:
        indices = json.load(fh)
    # 'time' is stored as datetime kwargs; 'geom' as WKT text.
    return {
        'id': indices['id'],
        'time': datetime(**indices['time']),
        'geom': WKTElement(indices['geom']),
    }