def lookup_coordinates(session):
    """Render the coordinate-lookup page.

    On a validated POST, build a WGS84 point from the submitted lat/lon and
    report which EEZ zones and country borders contain it; otherwise render
    the empty form.
    """
    form = LatLongForm(request.form)
    data = None
    if request.method == 'POST' and form.validate():
        lon = form.longitude.data
        lat = form.latitude.data
        point = WKTElement('POINT(%s %s)' % (lon, lat), srid=4326)

        zone_rows = (session.query(EEZ12.geoname)
                     .filter(func.ST_Within(point, EEZ12.geom))
                     .order_by(EEZ12.geoname)
                     .all())
        country_rows = (session.query(WorldBorders.name)
                        .filter(func.ST_Within(point, WorldBorders.geom))
                        .order_by(WorldBorders.name)
                        .all())

        data = {
            'input_lat': lat,
            'input_lon': lon,
            'zones': [row.geoname for row in zone_rows],
            'countries': [row.name for row in country_rows],
        }
    # Single exit point: data stays None unless a valid POST was processed.
    return render_template('ocean/lookup_coordinates.html', form=form,
                           data=data)
def pathandrow(cls, lat, lon):
    """Return (path, row) result tuples whose descending-mode scene
    footprint contains the point (lat, lon).

    Returns the literal string u'----' if the query fails for any reason
    (preserving the original best-effort contract).
    """
    try:
        # NOTE(review): SRID 4236 looks like a typo for 4326 (WGS84).
        # Both sides of ST_Within use the same value, so the comparison is
        # still self-consistent -- confirm intent before changing.
        scene = (Session.query(cls.path, cls.row).filter(
            func.ST_Within(
                func.ST_SetSRID(
                    func.ST_MakePoint(float(lon), float(lat)), 4236),
                func.ST_SetSRID(cls.geom, 4236)),
            cls.mode == u'D').all())
        return scene
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        return u'----'
def timeseries(self, agg_unit, start, end, geom=None, column_filters=None):
    """Build a select statement counting records per time bucket.

    Implementation inspired by
    http://no0p.github.io/postgresql/2014/05/08/timeseries-tips-pg.html
    """
    table = self.point_table

    # 'quarter' has no single-unit interval literal; step by 3 months.
    if agg_unit == 'quarter':
        step = '3 months'
    else:
        step = '1 ' + agg_unit

    # CTE enumerating every bucket in [start, end] with a default count of 0.
    bucket_series = func.generate_series(func.date_trunc(agg_unit, start),
                                         func.date_trunc(agg_unit, end),
                                         step)
    defaults = select([sa.literal_column("0").label('count'),
                       bucket_series.label('time_bucket')]) \
        .alias('defaults')

    conditions = [table.c.point_date >= start, table.c.point_date <= end]
    if column_filters is not None:
        # column_filters must stay wrapped in a list: '+' on SQLAlchemy
        # conditions builds a new SQL expression like
        # :param1 + <column_filters> instead of concatenating.
        conditions += [column_filters]

    # Observed counts per bucket; only buckets with records yield rows.
    actuals = select([func.count(table.c.hash).label('count'),
                      func.date_trunc(agg_unit, table.c.point_date)
                          .label('time_bucket')]) \
        .where(sa.and_(*conditions)) \
        .group_by('time_bucket')

    # Optionally restrict to records inside the given GeoJSON geometry.
    if geom:
        actuals = actuals.where(
            func.ST_Within(table.c.geom, func.ST_GeomFromGeoJSON(geom)))

    # Alias so the select is usable as a subexpression.
    actuals = actuals.alias('actuals')

    # Outer-join defaults to actuals so empty buckets fall back to 0.
    name = sa.literal_column("'{}'".format(self.dataset_name)) \
        .label('dataset_name')
    bucket = defaults.c.time_bucket.label('time_bucket')
    count = func.coalesce(actuals.c.count, defaults.c.count).label('count')
    return select([name, bucket, count]).select_from(
        defaults.outerjoin(actuals,
                           actuals.c.time_bucket == defaults.c.time_bucket))
def timeseries(self, agg_unit, start, end, geom=None, column_filters=None):
    """Construct a per-bucket record-count timeseries select statement.

    Approach inspired by
    http://no0p.github.io/postgresql/2014/05/08/timeseries-tips-pg.html
    """
    t = self.point_table

    # Postgres intervals have no 'quarter' literal; step by 3 months.
    step = '3 months' if agg_unit == 'quarter' else '1 ' + agg_unit

    # Every time bucket in the requested range, defaulting to a count of 0.
    series = func.generate_series(func.date_trunc(agg_unit, start),
                                  func.date_trunc(agg_unit, end),
                                  step)
    defaults = select([sa.literal_column("0").label('count'),
                       series.label('time_bucket')]).alias('defaults')

    # Observed counts: only buckets that actually contain records.
    filters = [t.c.point_date >= start, t.c.point_date <= end]
    if column_filters:
        filters += column_filters
    observed = select([func.count(t.c.hash).label('count'),
                       func.date_trunc(agg_unit, t.c.point_date)
                           .label('time_bucket')]) \
        .where(sa.and_(*filters)) \
        .group_by('time_bucket')

    # Optional spatial restriction.
    if geom:
        observed = observed.where(
            func.ST_Within(t.c.geom, func.ST_GeomFromGeoJSON(geom)))

    # Alias so the select can be used inside the join below.
    observed = observed.alias('actuals')

    # Outer join: buckets with no observed rows fall back to the 0 default.
    name = sa.literal_column("'{}'".format(self.dataset_name)) \
        .label('dataset_name')
    bucket = defaults.c.time_bucket.label('time_bucket')
    count = func.coalesce(observed.c.count, defaults.c.count).label('count')
    return select([name, bucket, count]).select_from(
        defaults.outerjoin(observed,
                           observed.c.time_bucket == defaults.c.time_bucket))
def find_trips_matching_line(cls, line, user_id):
    '''Return a query for the user's trips that completely contain `line`.

    Possible future work: verify direction of travel by projecting each
    vertex of the input line onto the candidate route
    (ST_LineLocatePoint) and comparing orderings (ST_OrderingEquals).
    The containment constraint could also be relaxed so that only a
    large fraction (e.g. 90%) of sampled points must fall inside the
    route buffer, rather than every point.
    '''
    projected = cls.points_to_projected_line(line)
    return (session.query(Trip)
            .filter_by(user_id=user_id)
            .filter(func.ST_Within(projected, Trip.geom)))
def query_point(x, y: float, d_start, d_end: datetime) -> WeatherStat:
    """Query the database for weather records near a point.

    Parameters
    ----------
    x, y : float
        Coordinates of the point.
    d_start, d_end : datetime
        Start and end dates of the query period.

    Returns the (lazy) query on success, or None if building it raises.
    """
    session = Session()
    print(
        f"Searching for points near POINT({x}, {y}) from {d_start.strftime('%d-%m-%Y')} to {d_end.strftime('%d-%m-%Y')}"
    )
    try:
        result = (session.query(WeatherStat)
                  .filter(
                      func.ST_Within(
                          WKTElement(f'POINT({x} {y})', srid=SRID),
                          WeatherStat.geom.ST_Buffer(10)),
                      WeatherStat.date >= d_start,
                      WeatherStat.date <= d_end)
                  .order_by(WeatherStat.date.asc())
                  .distinct(WeatherStat.date))
    except Exception as e:
        print(e)
        return None
    return result
def test_ST_Buffer(self):
    from sqlalchemy.sql import select, func
    from geoalchemy2 import WKBElement, WKTElement

    lake_id = self._create_one()

    # Buffer computed three ways: raw select, instance column, query column.
    s = select([func.ST_Buffer(Lake.__table__.c.geom, 2)])
    from_select = session.execute(s).scalar()
    ok_(isinstance(from_select, WKBElement))

    lake = session.query(Lake).get(lake_id)
    from_instance = session.execute(lake.geom.ST_Buffer(2)).scalar()
    ok_(isinstance(from_instance, WKBElement))

    from_query = session.query(Lake.geom.ST_Buffer(2)).scalar()
    ok_(isinstance(from_query, WKBElement))

    # All three paths must agree on the resulting geometry bytes.
    ok_(from_select.data == from_instance.data == from_query.data)

    # The origin lies within the buffered lake geometry.
    match = session.query(Lake).filter(
        func.ST_Within(WKTElement('POINT(0 0)', srid=4326),
                       Lake.geom.ST_Buffer(2))).one()
    ok_(isinstance(match, Lake))
    eq_(match.id, lake_id)
def test_ST_Buffer_Mixed_SRID(self):
    from sqlalchemy.sql import func

    self._create_one()

    # Plain WKT string (no SRID) against an SRID-tagged buffered column.
    query = session.query(Lake).filter(
        func.ST_Within('POINT(0 0)', Lake.geom.ST_Buffer(2)))
    query.one()
session = self.session_maker() if bbox and len(bbox := bbox.split('),(')) > 1: bbox = [list(map(float, i.strip('()').split(','))) for i in bbox] if len(bbox) == 2: bbox = [ bbox[0], (bbox[1][0], bbox[0][1]), bbox[1], (bbox[0][0], bbox[1][1]) ] bbox.append(bbox[0]) else: bbox = None query_location = Issue.location if bbox: user_location = f'POLYGON(({", ".join(" ".join(map(str, i)) for i in bbox)}))' user_location = func.ST_GeomFromEWKT(f'SRID=4326;{user_location}') _filter = func.ST_Within(cast(Issue.location, Geometry), user_location) else: user_location = f'POINT({lat} {lon})' _filter = func.ST_DWithin(query_location, user_location, distance) filtered = session.query(Issue).filter(_filter) if pollution_types: filtered = filtered.filter( Issue.pollution_category_id.in_(pollution_types)) if state_types: filtered = filtered.filter(Issue.state_id.in_(state_types)) issues = filtered.order_by( Issue.pollution_rating.desc()).limit(count).all() return issues def get_issues(self, ids):
def fetch_street_edges_within(bounding_box):
    """Return a query for street edges whose WGS84-transformed geometry
    lies within the given bounding box."""
    envelope = StreetEdge._envelope(bounding_box, geom=True)
    return session.query(StreetEdge).filter(
        func.ST_Within(func.ST_Transform(StreetEdge.geom, 4326), envelope))
def argoQuery(dbcon, geoWKT=None, tspan=None, withinDmeter=None, tsort=None): tbl = dbcon.getTable('argo2', 'oceanobs') #first create a subquery to quickly discard argo profiles subqry = select([tbl]) if tspan: subqry = subqry.where( func.overlaps(tbl.c.tstart, tbl.c.tend, tspan[0], tspan[1])) # Apply initial geospatial constraints if geoWKT: if withinDmeter: #only base initial constraints ont he bounding box subqry = subqry.where( func.ST_DWithin( literal_column('ST_Envelope(geom::geometry)::geography'), func.ST_GeogFromText(geoWKT), withinDmeter)) else: subqry = subqry.where( func.ST_Intersects(literal_column('geom::geometry'), func.ST_GeomFromText(geoWKT, 4326))) #we need to assign an alias to this subquery in order to work with it subqry = subqry.alias("ar") #expand the arrays and points int he subquery qry = select([ subqry.c.wmoid, subqry.c.uri, subqry.c.datacenter, func.unnest(subqry.c.mode).label('mode'), func.unnest(subqry.c.ascend).label('ascend'), func.unnest(subqry.c.tlocation).label('tlocation'), func.unnest(subqry.c.cycle).label('cycle'), func.unnest(subqry.c.iprof).label('iprof'), ST_Dump(literal_column("ar.geom::geometry")).geom.label('geom') ]) #additional spatial constraints finalqry = qry qry = qry.alias("arex") if tspan: finalqry = select([qry]).where( between(qry.c.tlocation, tspan[0], tspan[1])) if geoWKT: if withinDmeter: #only base initial constraints ont he bounding box finalqry = finalqry.where( func.ST_DWithin(qry.c.geom, func.ST_GeogFromText(geoWKT), withinDmeter)) else: finalqry = finalqry.where( func.ST_Within(literal_column("arex.geom"), func.ST_GeomFromText(geoWKT, 4326))) if tsort: finalqry = finalqry.order_by(qry.c.tlocation) return dbcon.dbeng.execute(finalqry)