def _process(self, *args, properties=None, **kwargs):
    """
    Return the plot dataframe.

    :param properties: List of properties to retain. Can be a python list
        or a comma (',') separated string. If None, every JSONB property
        key found in the plot table is retained.
    :return: Tuple of (dataframe indexed by plot id, empty list,
        options dict with the index label).
    """
    with Connector.get_connection() as connection:
        # Distinct JSONB keys present in plot.properties; used when the
        # caller does not restrict the property list.
        sel_keys = select([
            func.jsonb_object_keys(meta.plot.c.properties).distinct(),
        ])
        if properties is None:
            keys = [i[0] for i in connection.execute(sel_keys).fetchall()]
        else:
            if isinstance(properties, str):
                properties = properties.split(',')
            keys = properties
        props = [meta.plot.c.properties[k].label(k) for k in keys]
        sel = select([
            meta.plot.c.id.label('id'),
            meta.plot.c.name.label('name'),
            func.st_x(meta.plot.c.location).label('x'),
            func.st_y(meta.plot.c.location).label('y'),
        ] + props)
        df = pd.read_sql(sel, connection, index_col='id')
        # Replace None values with NaN. float('nan') replaces the former
        # pd.np.NAN: the pandas.np alias was removed in pandas 1.0.
        df.fillna(value=float('nan'), inplace=True)
        return df, [], {'index_label': 'id'}
def test_update_view_action_success(self, flash_mock):
    """Test the create view action directly (successful test)."""
    resource_view_create = toolkit.get_action('resource_view_create')
    resource_view_update = toolkit.get_action('resource_view_update')
    # First create a resource. dict(base, key=value) merges on both
    # Python 2 and 3; the former items() concatenation only works on
    # Python 2, where dict.items() returns a list.
    data_dict = dict(self.base_data_dict, title='test4')
    resource_view = resource_view_create(TestViewCreated.context, data_dict)
    # Now try to update it!
    data_dict['id'] = resource_view['id']
    data_dict['longitude_field'] = 'long2'
    resource_view_update(TestViewCreated.context, data_dict)
    # Check we have lat/long values. This is done more extensively in
    # test_actions.
    metadata = MetaData()
    table = Table(self.resource['resource_id'], metadata, autoload=True,
                  autoload_with=TestViewCreated.engine)
    s = select([
        table.c['latitude'],
        table.c['long2'],
        func.st_x(table.c['_geom']).label('x'),
        func.st_y(table.c['_geom']).label('y'),
    ]).where(table.c['_the_geom_webmercator'] != None)
    r = TestViewCreated.engine.execute(s)
    # try/finally (no bare except) guarantees the result is closed even
    # when an assertion fails.
    try:
        assert_equal(r.rowcount, 2)
        for row in r:
            assert_equal(float(row['x']), float(row['long2']))
            assert_equal(float(row['y']), float(row['latitude']))
    finally:
        r.close()
    # Check we have a message to inform us all went well
    assert_true(flash_mock.called)
    assert_equal(flash_mock.call_args[1]['category'], 'alert-success')
def getBeachWaterQual():
    """
    Queries Postgres AWS RDS to return the most recent water quality report
    data for each beach that is tested in SB County.

    Data are spread across tables with mapped relationships. The query joins
    the relevant tables, applies "distinct" on the waterQuality beach ID so
    only one record per beach survives, and orders by the joined MD5 table's
    insert date (descending) so that surviving record is the newest one.

    :return: List of query result rows, each containing:
        3 ORM objects: waterQuality, waterQualityMD5, beaches
        1 string: geometry type of the associated beach
        2 floats: x and y coordinates of the associated beach
    """
    session = Session()
    query = (
        session.query(
            waterQuality,
            waterQualityMD5,
            beaches,
            sqlfunc.ST_GeometryType(beaches.geom),
            sqlfunc.st_x(beaches.geom),
            sqlfunc.st_y(beaches.geom),
        )
        .join(waterQualityMD5)
        .join(beaches)
        .distinct(waterQuality.beach_id)
        .order_by(waterQuality.beach_id, waterQualityMD5.insdate.desc())
    )
    # Session is deliberately left open: the results are consumed by
    # another function after this one returns.
    return query.all()
def unit_search(request):
    """Search units by name and return up to 10 matches with geometry.

    Reads 'term' (substring to match) and optional 'srs' (target spatial
    reference system, default 4326) from the request parameters.

    :return: List of dicts (one per unit) with the unit's attributes plus,
        when available, 'extent' (left, bottom, right, top) from the
        polygon geometry and 'x'/'y' from the point geometry.
    """
    q = request.params['term']
    srs = int(request.params['srs']) if 'srs' in request.params else 4326
    dbsession = DBSession()

    def _proj(geom):
        # Tag the stored geometry as WGS84 then reproject to the target SRS.
        return func.st_transform(func.st_setsrid(geom, 4326), srs)

    fields = (
        Unit,
        func.st_xmin(_proj(UnitPolygon.geom)).label('left'),
        func.st_xmax(_proj(UnitPolygon.geom)).label('right'),
        func.st_ymin(_proj(UnitPolygon.geom)).label('bottom'),
        func.st_ymax(_proj(UnitPolygon.geom)).label('top'),
        func.st_x(_proj(UnitPoint.geom)).label('x'),
        func.st_y(_proj(UnitPoint.geom)).label('y'),
    )
    result = dbsession.query(*fields) \
        .outerjoin((UnitPolygon, UnitPolygon.unit_id == Unit.id)) \
        .outerjoin((UnitPoint, UnitPoint.unit_id == Unit.id)) \
        .filter(Unit.name.ilike('%' + q + '%')) \
        .order_by(Unit.id_level4, Unit.name).limit(10)
    rows = []
    for r in result:
        itm = r.Unit.as_dict()
        itm['value'] = r.Unit.name
        # Compare against None explicitly: 0.0 is a valid coordinate
        # (equator / prime meridian) but is falsy, so truthiness tests
        # would silently drop those geometries.
        if r.left is not None and r.top is not None:
            itm['extent'] = (r.left, r.bottom, r.right, r.top)
        if r.x is not None and r.y is not None:
            itm['x'] = r.x
            itm['y'] = r.y
        rows.append(itm)
    return rows
def test_populate(self):
    """Ensure it's possible to first create the columns and
    populate/update them later."""
    create_geom_columns = toolkit.get_action('create_geom_columns')
    create_geom_columns(TestMapActions.context, {
        'resource_id': self.resource['resource_id'],
        'populate': False
    })
    # Test the result did not populate the geom field
    metadata = MetaData()
    table = Table(self.resource['resource_id'], metadata, autoload=True,
                  autoload_with=TestMapActions.engine)
    s = select(['*']).where(table.c['_the_geom_webmercator'] != None)
    r = TestMapActions.engine.execute(s)
    # try/finally (no bare except) still guarantees cleanup on failure.
    try:
        assert r.rowcount == 0, "Table was populated"
    finally:
        r.close()
    # Now populate the entries, and test they are correct.
    update_geom_columns = toolkit.get_action('update_geom_columns')
    update_geom_columns(TestMapActions.context, {
        'resource_id': self.resource['resource_id'],
        'latitude_field': 'latitude',
        'longitude_field': 'longitude'
    })
    metadata = MetaData()
    table = Table(self.resource['resource_id'], metadata, autoload=True,
                  autoload_with=TestMapActions.engine)
    s = select([
        table.c['latitude'],
        table.c['longitude'],
        func.st_x(table.c['_geom']).label('x'),
        func.st_y(table.c['_geom']).label('y'),
        table.c['skip']
    ]).where(table.c['_the_geom_webmercator'] != None)
    r = TestMapActions.engine.execute(s)
    try:
        assert r.rowcount == 2, \
            "Did not report the expected rows. Expecting {}, got {}".format(
                2, r.rowcount)
        for row in r:
            assert float(row['x']) == float(row['longitude']), \
                "Longitude not correctly set"
            assert float(row['y']) == float(row['latitude']), \
                "Latitude not correctly set"
            assert row['skip'] == 'no', \
                "Row was included which should have not"
    finally:
        r.close()
def test_create_geom_columns(self):
    """Test creating geom columns using default settings."""
    # Create the geom columns
    create_geom_columns = toolkit.get_action('create_geom_columns')
    create_geom_columns(TestMapActions.context, {
        'resource_id': self.resource['resource_id'],
        'latitude_field': 'latitude',
        'longitude_field': 'longitude'
    })
    # Test we have the expected columns
    metadata = MetaData()
    table = Table(self.resource['resource_id'], metadata, autoload=True,
                  autoload_with=TestMapActions.engine)
    assert '_geom' in table.c, "Column geom was not created"
    assert '_the_geom_webmercator' in table.c, \
        "Column _the_geom_webmercator was not created"
    s = select([
        table.c['latitude'],
        table.c['longitude'],
        func.st_x(table.c['_geom']).label('x'),
        func.st_y(table.c['_geom']).label('y'),
        table.c['skip']
    ]).where(table.c['_the_geom_webmercator'] != None)
    r = TestMapActions.engine.execute(s)
    # try/finally (no bare except) still guarantees cleanup on failure.
    try:
        assert r.rowcount == 2, \
            "Did not report the expected rows. Expecting {}, got {}".format(
                2, r.rowcount)
        for row in r:
            assert float(row['x']) == float(row['longitude']), \
                "Longitude not correctly set"
            assert float(row['y']) == float(row['latitude']), \
                "Latitude not correctly set"
            assert row['skip'] == 'no', \
                "Row was included which should have not"
    finally:
        r.close()
    # Test we have the expected indices
    insp = reflection.Inspector.from_engine(TestMapActions.engine)
    index_exists = False
    expected_name = self.resource['resource_id'] + u'__the_geom_webmercator_index'
    for index in insp.get_indexes(self.resource['resource_id']):
        if (expected_name.startswith(index['name'])
                and not index['unique']
                and len(index['column_names']) == 1
                and index['column_names'][0] == u'_the_geom_webmercator'):
            index_exists = True
            break
    assert index_exists, "Index not created"
def _process(self, *args, properties=None, drop_null_properties=False, **kwargs):
    """
    Return the occurrence dataframe.

    :param properties: List of properties to retain. Can be a python list
        or a comma (',') separated string. If None, every JSONB property
        key found in the occurrence table is retained.
    :param drop_null_properties: If True, drop rows where any retained
        property is null.
    :return: Tuple of (dataframe indexed by occurrence id, empty list,
        options dict with the index label).
    """
    with Connector.get_connection() as connection:
        # Distinct JSONB keys present in occurrence.properties; used when
        # the caller does not restrict the property list.
        sel_keys = select([
            func.jsonb_object_keys(meta.occurrence.c.properties).distinct(),
        ])
        if properties is None:
            keys = [i[0] for i in connection.execute(sel_keys).fetchall()]
        else:
            if isinstance(properties, str):
                properties = properties.split(',')
            keys = properties
        props = [meta.occurrence.c.properties[k].label(k) for k in keys]
        # Outer join keeps occurrences whose taxon_id has no taxon match.
        sel = select([
            meta.occurrence.c.id.label('id'),
            meta.occurrence.c.taxon_id.label('taxon_id'),
            cast(meta.taxon.c.rank.label('rank'), String).label('rank'),
            meta.taxon.c.full_name.label('full_name'),
            func.st_x(meta.occurrence.c.location).label('x'),
            func.st_y(meta.occurrence.c.location).label('y'),
        ] + props).select_from(
            meta.occurrence.outerjoin(
                meta.taxon,
                meta.taxon.c.id == meta.occurrence.c.taxon_id
            )
        )
        df = pd.read_sql(sel, connection, index_col='id')
        df['taxon_id'] = df['taxon_id'].apply(pd.to_numeric)
        # Replace None values with NaN. float('nan') replaces the former
        # pd.np.NAN: the pandas.np alias was removed in pandas 1.0.
        df.fillna(value=float('nan'), inplace=True)
        if drop_null_properties:
            for k in keys:
                df = df[df[k].notnull()]
        return df, [], {'index_label': 'id'}
def unit_search(request):
    """Search units by name and return up to 10 matches with geometry.

    Reads 'term' (substring to match) and optional 'srs' (target spatial
    reference system, default 4326) from the request parameters.

    :return: List of dicts (one per unit) with the unit's attributes plus,
        when available, 'extent' (left, bottom, right, top) from the
        polygon geometry and 'x'/'y' from the point geometry.
    """
    q = request.params['term']
    srs = int(request.params['srs']) if 'srs' in request.params else 4326
    dbsession = DBSession()
    fields = (
        Unit,
        func.st_xmin(func.st_transform(func.st_setsrid(UnitPolygon.geom, 4326), srs)).label('left'),
        func.st_xmax(func.st_transform(func.st_setsrid(UnitPolygon.geom, 4326), srs)).label('right'),
        func.st_ymin(func.st_transform(func.st_setsrid(UnitPolygon.geom, 4326), srs)).label('bottom'),
        func.st_ymax(func.st_transform(func.st_setsrid(UnitPolygon.geom, 4326), srs)).label('top'),
        func.st_x(func.st_transform(func.st_setsrid(UnitPoint.geom, 4326), srs)).label('x'),
        func.st_y(func.st_transform(func.st_setsrid(UnitPoint.geom, 4326), srs)).label('y'),
    )
    result = dbsession.query(*fields) \
        .outerjoin((UnitPolygon, UnitPolygon.unit_id == Unit.id)) \
        .outerjoin((UnitPoint, UnitPoint.unit_id == Unit.id)) \
        .filter(Unit.name.ilike('%' + q + '%')) \
        .order_by(Unit.id_level4, Unit.name).limit(10)
    rows = []
    for r in result:
        itm = r.Unit.as_dict()
        itm['value'] = r.Unit.name
        # Compare against None explicitly: 0.0 is a valid coordinate
        # (equator / prime meridian) but is falsy, so truthiness tests
        # would silently drop those geometries.
        if r.left is not None and r.top is not None:
            itm['extent'] = (r.left, r.bottom, r.right, r.top)
        if r.x is not None and r.y is not None:
            itm['x'] = r.x
            itm['y'] = r.y
        rows.append(itm)
    return rows
"""Compute the centroid of the spatial-join bin and emit it as a JS snippet."""
import json
import sys

import piecewise.config
from sqlalchemy import MetaData, Table, Column, create_engine, func, select
from sqlalchemy.sql.expression import label

with open("piecewise_config.json") as config_file:
    aggregator = piecewise.config.read_config(json.load(config_file))

# next(...) on a generator works on both Python 2 and 3; the former
# filter(...)[0] fails on Python 3, where filter returns a lazy iterator
# that does not support indexing.
spatial_bin = next(b for b in aggregator.bins if b.label == 'spatial_join')

engine = create_engine(aggregator.database_uri)
metadata = MetaData()
metadata.bind = engine
geom = Column(spatial_bin.geometry_column)
spatial_table = Table(spatial_bin.table, metadata,
                      Column(spatial_bin.geometry_column))

with engine.begin() as conn, open(sys.argv[1], 'w') as out:
    # Collect all geometries, take the centroid, and read its coordinates.
    centroid = func.st_astext(func.st_centroid(func.st_collect(geom)))
    query = select([func.st_x(centroid),
                    func.st_y(centroid)]).select_from(spatial_table)
    (x, y) = conn.execute(query).fetchone()
    # st_y is latitude, st_x is longitude.
    out.write("var center = [{lat}, {lon}];".format(lat=y, lon=x))
"""Compute the centroid of the spatial-join bin and emit it as a JS snippet."""
import json
import sys

import piecewise.config
from sqlalchemy import MetaData, Table, Column, create_engine, func, select
from sqlalchemy.sql.expression import label

with open("piecewise_config.json") as config_file:
    aggregator = piecewise.config.read_config(json.load(config_file))

# next(...) on a generator works on both Python 2 and 3; the former
# filter(...)[0] fails on Python 3, where filter returns a lazy iterator
# that does not support indexing.
spatial_bin = next(b for b in aggregator.bins if b.label == 'spatial_join')

engine = create_engine(aggregator.database_uri)
metadata = MetaData()
metadata.bind = engine
geom = Column(spatial_bin.geometry_column)
spatial_table = Table(spatial_bin.table, metadata,
                      Column(spatial_bin.geometry_column))

with engine.begin() as conn, open(sys.argv[1], 'w') as out:
    # Collect all geometries, take the centroid, and read its coordinates.
    centroid = func.st_astext(func.st_centroid(func.st_collect(geom)))
    query = select([func.st_x(centroid),
                    func.st_y(centroid)]).select_from(spatial_table)
    (x, y) = conn.execute(query).fetchone()
    # st_y is latitude, st_x is longitude.
    out.write("var center = [{lat}, {lon}];".format(lat=y, lon=x))
# Write to PG; first fetch existing data so duplicates can be detected.
existing_mh = session.query(LocallyHeardStation.call,
                            LocallyHeardStation.timestamp).all()
# "CALL HH:MM:SS" strings for each already-stored heard station.
existing_mh_data = [
    f"{row.call} {row.timestamp.strftime('%H:%M:%S')}" for row in existing_mh
]

radio_mh_list = sorted(radio_mh_list, key=lambda x: x[1], reverse=False)

existing_ops = session.query(Operator.id, Operator.call,
                             func.st_x(Operator.geom),
                             func.st_y(Operator.geom),
                             Operator.grid).all()
# Map call sign -> (lat, lon, grid); st_x is longitude, st_y latitude.
existing_ops_data = {op[1]: (op[3], op[2], op[4]) for op in existing_ops}

existing_digipeaters = session.query(Digipeater.call,
                                     func.st_x(Digipeater.geom),
                                     func.st_y(Digipeater.geom),
                                     Digipeater.heard).all()
existing_digipeaters_data = {}
def get_nearest(lat, lon):
    """Return a Place query ordered by proximity to the given coordinates.

    Each result row contains the Place plus its x and y coordinates.
    Technique from:
    https://stackoverflow.com/questions/33482653/geoalchemy2-get-the-lat-lon-of-a-point
    """
    # Build a WKT point from the input coordinates (WKT order is x y,
    # i.e. lon lat).
    point = WKTElement('POINT({0} {1})'.format(lon, lat), srid=3857)
    nearest = (
        db.session.query(Place,
                         func.st_x(Place.geom),
                         func.st_y(Place.geom))
        .order_by(Place.geom.distance_box(point))
    )
    return nearest