async def post(self):
    """Create a new Route from the JSON request body and write the
    stored row back to the client as the response.

    Responds 400 with the validation messages when the body does not
    match ``RouteInputSchema``.
    """
    try:
        args = RouteInputSchema().load(
            escape.json_decode(self.request.body))
    except ValidationError as e:
        raise web.HTTPError(400, escape.json_encode(e.messages), e.messages)

    payload = args.data
    route_id = str(uuid.uuid4())

    # GeoJSON fields are converted to PostGIS geometries on insert.
    new_route = Route(
        id=route_id,
        origin=func.ST_GeomFromGeoJSON(
            escape.json_encode(payload['origin'])),
        origin_name=payload['origin_name'],
        destination=func.ST_GeomFromGeoJSON(
            escape.json_encode(payload['destination'])),
        destination_name=payload['destination_name'],
        polyline=func.ST_GeomFromGeoJSON(
            escape.json_encode(payload['polyline'])),
        bounds=payload.get('bounds'),
        created=payload.get('created') or datetime.utcnow(),
    )
    self.db.add(new_route)
    self.db.commit()

    # Re-read the committed row so the response reflects what was stored.
    stored = self._route_query().filter(Route.id == route_id).one()
    output = RouteOutputSchema().dumps(row_to_dict(stored))
    self.finish(output.data)
def import_place(filename):
    """Import a place dump (place row, ISA records, items and their
    candidates) from a JSON file into the database.

    :param filename: path of the JSON export to load.

    Fix: the input file is now opened with a context manager so the
    handle is closed deterministically instead of leaking until GC.
    """
    app.config.from_object('config.default')
    database.init_app(app)

    with open(filename) as f:
        data = json.load(f)

    place_fields = ['place_id', 'osm_type', 'osm_id', 'display_name',
                    'category', 'type', 'place_rank', 'icon', 'south',
                    'west', 'north', 'east', 'extratags', 'address',
                    'namedetails', 'item_count', 'candidate_count', 'state',
                    'override_name', 'lat', 'lon', 'wikidata_query_timeout',
                    'wikidata', 'item_types_retrieved', 'index_hide',
                    'overpass_is_in', 'existing_wikidata']
    item_fields = ['item_id', 'enwiki', 'entity', 'categories',
                   'query_label', 'extract_names', 'tags', 'extracts']
    isa_fields = ['item_id', 'entity', 'label']
    candidate_fields = ['osm_id', 'osm_type', 'name', 'dist', 'tags',
                        'planet_table', 'src_id', 'identifier_match',
                        'address_match', 'name_match']

    place = Place(**{key: data['place'][key] for key in place_fields})
    place.geom = func.ST_GeomFromGeoJSON(json.dumps(data['place']['geom']))
    database.session.add(place)

    # Upsert ISA rows: update in place when the record already exists.
    for isa_data in data['isa']:
        isa = IsA.query.get(isa_data['item_id'])
        if isa:
            for key in isa_fields:
                setattr(isa, key, isa_data[key])
        else:
            isa = IsA(**{key: isa_data[key] for key in isa_fields})
            database.session.add(isa)

    for item_data in data['items']:
        item_id = item_data['item_id']
        assert item_data['ewkt']
        item = Item(location=item_data['ewkt'],
                    **{key: item_data[key] for key in item_fields})
        for isa_id in item_data['isa']:
            database.session.add(ItemIsA(item_id=item_id, isa_id=isa_id))
        place.items.append(item)
        for candidate_data in item_data['candidates']:
            this = {key: candidate_data[key] for key in candidate_fields}
            candidate = ItemCandidate(item_id=item_id, **this)
            candidate.geom = func.ST_GeomFromGeoJSON(
                json.dumps(candidate_data['geom']))
            database.session.add(candidate)

    database.session.commit()
def from_geojson(cls, geojson):
    """Turn a GeoJSON string into a Geometry object that can be
    put into the database.
    """
    # Parse the GeoJSON, then stamp it with SRID 4326 (WGS 84).
    return func.ST_SetSRID(func.ST_GeomFromGeoJSON(geojson), 4326)
def get_dataset_extent_alchemy_expression(md: MetadataType, default_crs: str = None):
    """
    Build an SQLAlchemy expression for a dataset's extent, returned as a
    PostGIS geometry.

    The logic here mirrors the extent() function of
    datacube.model.Dataset.
    """
    doc = _jsonb_doc_expression(md)

    if 'grid_spatial' not in md.definition['dataset']:
        # Non-spatial product: no extent can be derived.
        return None

    projection_offset = _projection_doc_offset(md)
    valid_data_offset = projection_offset + ['valid_data']

    # Prefer the explicit valid_data polygon when present; otherwise fall
    # back to a polygon built from the four corner points.
    valid_data_geom = func.ST_GeomFromGeoJSON(doc[valid_data_offset].astext,
                                              type_=Geometry)
    extent = case(
        [(doc[valid_data_offset] != None, valid_data_geom)],
        else_=_bounds_polygon(doc, projection_offset),
    )
    return func.ST_SetSRID(
        extent,
        get_dataset_srid_alchemy_expression(md, default_crs),
        type_=Geometry)
def make_grid(self, resolution, geom=None, conditions=None):
    """
    Count points per grid square of the given resolution.

    :param resolution: length of side of grid square in meters
    :type resolution: int
    :param geom: string representation of geojson fragment
    :type geom: str
    :param conditions: conditions on columns to filter on
    :type conditions: list of SQLAlchemy binary operations
                      (e.g. col > value)
    :return: grid: result proxy with all result rows
             size_x, size_y: the horizontal and vertical size
             of the grid squares in degrees
    """
    if conditions is None:
        conditions = []

    # Resolution is given in meters, but EPSG:4326 is measured in
    # degrees, so convert using the latitude of the bbox center.
    center = self.get_bbox_center()  # center[1] is longitude
    size_x, size_y = get_size_in_degrees(resolution, center[1])

    # One count per (size_x x size_y) square.
    table = self.point_table
    grid_query = (session.query(func.count(table.c.hash),
                                func.ST_SnapToGrid(table.c.geom,
                                                   size_x, size_y)
                                .label('squares'))
                  .filter(*conditions)
                  .group_by('squares'))

    if geom:
        grid_query = grid_query.filter(
            table.c.geom.ST_Within(func.ST_GeomFromGeoJSON(geom)))

    return session.execute(grid_query), size_x, size_y
def import_nivo_sensor_station():
    """Fetch the Meteo France nivo station list and upsert the stations
    into the sensor-station table, then report how many rows exist.
    """
    # this need refactor
    res = requests.get(f"{Config.METEO_FRANCE_NIVO_BASE_URL}/postesNivo.json")
    res.raise_for_status()

    with connection_scope() as con:
        with con.begin():
            for feature in res.json()["features"]:
                props = feature["properties"]

                # Promote the 2D point to 3D by appending the altitude.
                pointz = feature["geometry"]
                pointz["coordinates"].append(int(props["Altitude"]))

                mf_id = props["ID"] if props["ID"] != "" else None
                stmt = (insert(SensorStationTable)
                        .values(**{
                            "nss_name": props["Nom"],
                            "nss_meteofrance_id": mf_id,
                            "the_geom": func.ST_SetSRID(
                                func.ST_GeomFromGeoJSON(json.dumps(pointz)),
                                4326),
                        })
                        .on_conflict_do_nothing(index_elements=["nss_name"]))
                con.execute(stmt)

        inserted = (con.execute(
            select([func.count(SensorStationTable.c.nss_id)
                    .label("count")])).first().count)
        click.echo(f"{inserted} sensor station imported")
def is_closed_polygon(self, geometry: Dict) -> bool:
    """Return True when *geometry* (a GeoJSON dict) is a closed shape
    according to PostGIS ``ST_IsClosed``; False when it is not closed or
    the query fails (e.g. invalid geometry).

    Fix: the handler previously caught ``BaseException``, which would
    also swallow ``KeyboardInterrupt``/``SystemExit``; it now catches
    only ``Exception``.
    """
    try:
        return self.session.query(
            func.ST_isclosed(
                func.ST_GeomFromGeoJSON(json.dumps(geometry))
            ).label('closed')).one().closed
    except Exception:
        return False
def fill_regions(self):
    """Fill the polygon of Ukraine for check if the coordinates of
    problem lie on the map of Ukraine.
    """
    ukraine = Region(
        id=1,
        name='Ukraine',
        location=func.ST_GeomFromGeoJSON(json.dumps(geo_ukraine)),
    )
    self.session.add(ukraine)
    self.session.commit()
    self._fix_id(obj_res=Region)
def timeseries(self, agg_unit, start, end, geom=None, column_filters=None):
    """Build a timeseries SELECT with one row per time bucket between
    *start* and *end*, defaulting to a count of 0 for empty buckets.

    Implementation inspired by
    http://no0p.github.io/postgresql/2014/05/08/timeseries-tips-pg.html
    """
    t = self.point_table

    # generate_series steps by '1 <unit>', except quarters (3 months).
    step = '3 months' if agg_unit == 'quarter' else '1 ' + agg_unit

    # CTE of every bucket in the range, each with a default count of 0.
    buckets = func.generate_series(func.date_trunc(agg_unit, start),
                                   func.date_trunc(agg_unit, end),
                                   step)
    defaults = select([sa.literal_column("0").label('count'),
                       buckets.label('time_bucket')]).alias('defaults')

    conditions = [t.c.point_date >= start, t.c.point_date <= end]
    if column_filters is not None:
        # column_filters must be wrapped in a list: '+' on SQLAlchemy
        # conditions builds a new expression (:param1 + <filters>)
        # instead of concatenating condition lists.
        conditions += [column_filters]

    # Observed counts — only buckets that actually contain records.
    actuals = (select([func.count(t.c.hash).label('count'),
                       func.date_trunc(agg_unit, t.c.point_date)
                       .label('time_bucket')])
               .where(sa.and_(*conditions))
               .group_by('time_bucket'))

    # Restrict to the requested geometry when given.
    if geom:
        actuals = actuals.where(
            func.ST_Within(t.c.geom, func.ST_GeomFromGeoJSON(geom)))

    # Alias so it can be used in the outer join below.
    actuals = actuals.alias('actuals')

    # Outer-join defaults with actuals; empty buckets fall back to 0.
    name = (sa.literal_column("'{}'".format(self.dataset_name))
            .label('dataset_name'))
    bucket = defaults.c.time_bucket.label('time_bucket')
    count = func.coalesce(actuals.c.count, defaults.c.count).label('count')
    return select([name, bucket, count]).select_from(
        defaults.outerjoin(
            actuals, actuals.c.time_bucket == defaults.c.time_bucket))
def timeseries(self, agg_unit, start, end, geom=None, column_filters=None):
    """Build a timeseries SELECT with one row per time bucket between
    *start* and *end*, defaulting to a count of 0 for empty buckets.

    Implementation inspired by
    http://no0p.github.io/postgresql/2014/05/08/timeseries-tips-pg.html
    """
    t = self.point_table

    # generate_series steps by '1 <unit>', except quarters (3 months).
    step = '3 months' if agg_unit == 'quarter' else '1 ' + agg_unit

    # CTE of every bucket in the range, each with a default count of 0.
    buckets = func.generate_series(func.date_trunc(agg_unit, start),
                                   func.date_trunc(agg_unit, end),
                                   step)
    defaults = select([sa.literal_column("0").label('count'),
                       buckets.label('time_bucket')]).alias('defaults')

    # Observed counts — only buckets that actually contain records.
    conditions = [t.c.point_date >= start, t.c.point_date <= end]
    if column_filters:
        conditions += column_filters

    actuals = (select([func.count(t.c.hash).label('count'),
                       func.date_trunc(agg_unit, t.c.point_date)
                       .label('time_bucket')])
               .where(sa.and_(*conditions))
               .group_by('time_bucket'))

    # Restrict to the requested geometry when given.
    if geom:
        actuals = actuals.where(
            func.ST_Within(t.c.geom, func.ST_GeomFromGeoJSON(geom)))

    # Alias so it can be used in the outer join below.
    actuals = actuals.alias('actuals')

    # Outer-join defaults with actuals; empty buckets fall back to 0.
    name = (sa.literal_column("'{}'".format(self.dataset_name))
            .label('dataset_name'))
    bucket = defaults.c.time_bucket.label('time_bucket')
    count = func.coalesce(actuals.c.count, defaults.c.count).label('count')
    return select([name, bucket, count]).select_from(
        defaults.outerjoin(
            actuals, actuals.c.time_bucket == defaults.c.time_bucket))
def createNewItem():
    """Persist the first feature of the posted GeoJSON body as either a
    Tree (Point geometry) or a Building (anything else), then redirect
    to the index page.

    Fixes: ``request.get_json()`` was parsed twice, and stray debug
    ``print`` calls were left in the handler.
    """
    data = request.get_json()

    # Stamp the geometry with an explicit CRS before handing it to PostGIS.
    geometry = data["features"][0]["geometry"]
    geometry["crs"] = {
        "type": "name",
        "properties": {
            "name": "EPSG:4326"
        }
    }
    geom_json = json.dumps(geometry)

    if geometry["type"] == "Point":
        db.session.add(Tree(geom=func.ST_GeomFromGeoJSON(geom_json)))
    else:
        db.session.add(Building(geom=func.ST_GeomFromGeoJSON(geom_json)))
    db.session.commit()

    return redirect(url_for('index'))
def narrow_candidates(cls, dataset_names, start, end, geom=None):
    """
    :param dataset_names: Names of point datasets to be considered
    :return names: Names of point datasets whose bounding box and date
                   range interesects with the given bounds.
    """
    # Drop datasets whose observation window misses [start, end].
    candidates = session.query(cls.dataset_name).filter(
        cls.dataset_name.in_(dataset_names),
        cls.date_added != None,
        cls.obs_from < end,
        cls.obs_to > start)

    # Optionally, also require the bounding box to hit the geometry.
    if geom:
        candidates = candidates.filter(
            cls.bbox.ST_Intersects(func.ST_GeomFromGeoJSON(geom)))

    return [row.dataset_name for row in candidates.all()]
def patch_links():
    """Handle a batch PATCH: create up to PAGE_LIMIT LineString links
    from the 'create' array of the JSON body and return their ids/URLs.
    """
    # Get request body as JSON document
    body = request.get_json()

    # Sanitise body
    if body is None:
        raise ApiBadRequest('request body must be non-empty')
    if not isinstance(body, dict):
        raise ApiBadRequest('request body must be a JSON object')

    # Extract create requests (absent key means "nothing to create").
    create_requests = body.get('create', [])
    if not isinstance(create_requests, list) or len(create_requests) > PAGE_LIMIT:
        raise ApiBadRequest(
            'create request must be an array of at most {0} items'.format(
                PAGE_LIMIT))

    # Process create requests
    created_links = []
    for req in create_requests:
        line_geojson = json.dumps(
            dict(type='LineString', coordinates=req['coordinates']))
        created_links.append(Link(
            uuid=uuid.uuid4().hex,
            geom=func.ST_SetSRID(func.ST_GeomFromGeoJSON(line_geojson),
                                 4326)))
    db.session.add_all(created_links)

    def make_create_response(link):
        link_id = uuid_to_urlsafe_id(link.uuid)
        return dict(id=link_id,
                    url=url_for('.link', unverified_link_id=link_id,
                                _external=True))

    create_responses = [make_create_response(link) for link in created_links]
    db.session.commit()

    return jsonify(dict(create=create_responses))
def within(id_malette, id_sensors, n: hug.types.number, response):
    """return all the sensors within {n} meters to sensors(id_sensors, id_malette)"""
    logger.debug("Call to within with : id_malette: %r, id_sensors: %r, "
                 "distance: %r ", id_malette, id_sensors, n)
    schm = schema.SensorsSchema()

    # Load the reference sensor; a DB error is treated as "not found".
    try:
        inst = schm.get_instance(
            {"id_malette": id_malette, "id_sensors": id_sensors})
    except SQLAlchemyError as err:
        logger.error("Error when getting sensor from db : %r", err)
        schm.Meta.sqla_session.rollback()
        inst = None

    if not inst:
        response.status = HTTP_400
        logger.debug("Returning 400 status")
        return "Can't find sensors"

    geom = json.dumps(inst.gps_pos)

    # Sensors whose position is within n meters of the reference point.
    try:
        logger.debug("Searching sensors in the defined zone "
                     "(distance %r, around %r)", n, geom)
        insts = schm.Meta.sqla_session.query(models.Sensors).filter(
            func.ST_DWithin(func.ST_GeomFromGeoJSON(geom),
                            models.Sensors.gps_pos,
                            n))
    except SQLAlchemyError as err:
        logger.error("Error when requesting sensor in the disance zone : %r",
                     err)
        schm.Meta.sqla_session.rollback()
        insts = None

    result = schm.dump(insts, many=True).data
    logger.debug("Returning theses sensors : %r", result)
    return result
def get_collection_items(collection_id=None, roles=None, item_id=None,
                         bbox=None, time=None, ids=None, collections=None,
                         cubes=None, intersects=None, page=1, limit=10,
                         query=None, **kwargs):
    """Retrieve a list of collection items based on filters.

    :param collection_id: Single Collection ID to include in the search for
        items. Only Items in the provided Collection will be searched,
        defaults to None
    :type collection_id: str, optional
    :param roles: caller's access roles ("<collection_id>:..."), defaults
        to None (treated as no roles)
    :type roles: list, optional
    :param item_id: item identifier, defaults to None
    :type item_id: str, optional
    :param bbox: bounding box for intersection [west, north, east, south],
        defaults to None
    :type bbox: list, optional
    :param time: Single date+time, or a range ('/' seperator), formatted to
        RFC 3339, section 5.6, defaults to None
    :type time: str, optional
    :param ids: Array of Item ids to return. All other filter parameters
        that further restrict the number of search results are ignored,
        defaults to None
    :type ids: list, optional
    :param collections: Array of Collection IDs to include in the search
        for items, defaults to None
    :type collections: list, optional
    :param cubes: Bool indicating if only cubes should be returned,
        defaults to None
    :type cubes: bool, optional
    :param intersects: Searches items by performing intersection between
        their geometry and provided GeoJSON geometry, defaults to None
    :type intersects: dict, optional
    :param page: The page offset of results, defaults to 1
    :type page: int, optional
    :param limit: The maximum number of results to return (page size),
        defaults to 10
    :type limit: int, optional
    :return: list of collection items
    :rtype: list

    Review fixes:
    - ``roles`` default changed from a shared mutable ``[]`` to ``None``.
    - The bbox parser catches only the exceptions ``split``/``float`` can
      raise instead of a bare ``except:`` (which also swallowed
      ``KeyboardInterrupt``/``SystemExit``).
    - Removed the stray trailing quote from the bbox error message.
    """
    if roles is None:
        roles = []

    columns = [
        Collection.name.label('collection'),
        Item.name.label('item'),
        Item.start_date.label('start'),
        Item.end_date.label('end'),
        Item.assets,
        func.ST_AsGeoJSON(Item.geom).label('geom'),
        func.Box2D(Item.geom).label('bbox'),
        Tile.name.label('tile')
    ]

    # Base restriction: items joined to their collection and tile, limited
    # to public collections or ones the caller has a role for.
    where = [
        Collection.id == Item.collection_id,
        Item.tile_id == Tile.id,
        or_(Collection.is_public.is_(True),
            Collection.id.in_([int(r.split(':')[0]) for r in roles]))
    ]

    if ids is not None:
        # 'ids' bypasses every other filter (see docstring).
        where += [Item.id.in_(ids.split(','))]
    elif item_id is not None:
        where += [Item.id.like(item_id)]
    else:
        if collections is not None:
            where += [Collection.name.in_(collections.split(','))]
        elif collection_id is not None:
            where += [Collection.name.like(collection_id)]

        if intersects is not None:
            where += [
                func.ST_Intersects(func.ST_GeomFromGeoJSON(str(intersects)),
                                   Item.geom)
            ]

        if query:
            filters = create_query_filter(query)
            if filters:
                where += filters

        if bbox is not None:
            try:
                split_bbox = [float(x) for x in bbox.split(',')]
                where += [
                    func.ST_Intersects(
                        func.ST_MakeEnvelope(split_bbox[0], split_bbox[1],
                                             split_bbox[2], split_bbox[3],
                                             func.ST_SRID(Item.geom)),
                        Item.geom)
                ]
            except (ValueError, AttributeError):
                raise InvalidBoundingBoxError(
                    f"'{bbox}' is not a valid bbox.")

        if time is not None:
            if "/" in time:
                # NOTE(review): only the range end is applied here;
                # time_start is split out but never used — preserved
                # as-is, confirm against upstream intent.
                time_start, time_end = time.split("/")
                time_end = datetime.fromisoformat(time_end)
                where += [
                    or_(Item.end_date <= time_end,
                        Item.start_date <= time_end)
                ]
            else:
                time_start = datetime.fromisoformat(time)
                where += [
                    or_(Item.start_date >= time_start,
                        Item.end_date >= time_start)
                ]

    query = session.query(*columns).filter(*where).order_by(
        Item.start_date.desc())
    result = query.paginate(page=int(page),
                            per_page=int(limit),
                            error_out=False,
                            max_per_page=int(BDC_STAC_MAX_LIMIT))
    return result
def get_collection_items(
    collection_id=None,
    roles=None,
    item_id=None,
    bbox=None,
    datetime=None,
    ids=None,
    collections=None,
    intersects=None,
    page=1,
    limit=10,
    query=None,
    **kwargs,
):
    """Retrieve a list of collection items based on filters.

    :param collection_id: Single Collection ID ("name-version") to include
        in the search for items, defaults to None
    :type collection_id: str, optional
    :param roles: caller's access roles ("<collection_id>:..."), defaults
        to None (treated as no roles)
    :type roles: list, optional
    :param item_id: item identifier, defaults to None
    :type item_id: str, optional
    :param bbox: bounding box for intersection [west, north, east, south],
        defaults to None
    :type bbox: list, optional
    :param datetime: Single date+time, or a range ('/' seperator),
        formatted to RFC 3339, section 5.6. Use double dots '..' for open
        date ranges, defaults to None. If the start or end date of an image
        generated by a temporal composition intersects the given datetime
        or range it will be included in the result.
    :type datetime: str, optional
    :param ids: Array of Item ids to return. All other filter parameters
        that further restrict the number of search results are ignored,
        defaults to None
    :type ids: list, optional
    :param collections: Array of Collection IDs to include in the search
        for items, defaults to None
    :type collections: list, optional
    :param intersects: Searches items by performing intersection between
        their geometry and provided GeoJSON geometry, defaults to None
    :type intersects: dict, optional
    :param page: The page offset of results, defaults to 1
    :type page: int, optional
    :param limit: The maximum number of results to return (page size),
        defaults to 10
    :type limit: int, optional
    :return: list of collection items
    :rtype: list

    Review fixes:
    - ``roles`` default changed from a shared mutable ``[]`` to ``None``.
    - The bbox parser catches specific exceptions instead of a bare
      ``except:`` (which also swallowed ``KeyboardInterrupt``).
    - Dropped a dead ``date_filter = None`` initialisation.
    """
    if roles is None:
        roles = []

    columns = [
        func.concat(Collection.name, "-",
                    Collection.version).label("collection"),
        Collection.collection_type,
        Collection._metadata.label("meta"),
        Item._metadata.label("item_meta"),
        Item.name.label("item"),
        Item.id,
        Item.collection_id,
        Item.start_date.label("start"),
        Item.end_date.label("end"),
        Item.assets,
        Item.created,
        Item.updated,
        cast(Item.cloud_cover, Float).label("cloud_cover"),
        func.ST_AsGeoJSON(Item.geom).label("geom"),
        func.Box2D(Item.geom).label("bbox"),
        Tile.name.label("tile"),
    ]

    # Base restriction: hide non-public collections the caller has no
    # role for.
    where = [
        Collection.id == Item.collection_id,
        or_(Collection.is_public.is_(True),
            Collection.id.in_([int(r.split(":")[0]) for r in roles])),
    ]

    if ids is not None:
        # 'ids' bypasses every other filter (see docstring).
        where += [Item.name.in_(ids.split(","))]
    else:
        if collections is not None:
            where += [
                func.concat(Collection.name, "-",
                            Collection.version).in_(collections.split(","))
            ]
        elif collection_id is not None:
            where += [
                func.concat(Collection.name, "-",
                            Collection.version) == collection_id
            ]

        if item_id is not None:
            where += [Item.name.like(item_id)]

        if query:
            filters = create_query_filter(query)
            if filters:
                where += filters

        if intersects is not None:
            where += [
                func.ST_Intersects(func.ST_GeomFromGeoJSON(str(intersects)),
                                   Item.geom)
            ]
        elif bbox is not None:
            try:
                split_bbox = [float(x) for x in bbox.split(",")]

                # Reject degenerate (zero-area) boxes.
                if split_bbox[0] == split_bbox[2] or split_bbox[
                        1] == split_bbox[3]:
                    raise InvalidBoundingBoxError("")

                where += [
                    func.ST_Intersects(
                        func.ST_MakeEnvelope(
                            split_bbox[0],
                            split_bbox[1],
                            split_bbox[2],
                            split_bbox[3],
                            func.ST_SRID(Item.geom),
                        ),
                        Item.geom,
                    )
                ]
            except (ValueError, AttributeError, InvalidBoundingBoxError):
                raise InvalidBoundingBoxError(
                    f"'{bbox}' is not a valid bbox.")

        if datetime is not None:
            if "/" in datetime:
                # ".." or "" marks an open-ended side of the range.
                matches_open = ("..", "")
                time_start, time_end = datetime.split("/")
                if time_start in matches_open:  # open start
                    date_filter = [
                        or_(Item.start_date <= time_end,
                            Item.end_date <= time_end)
                    ]
                elif time_end in matches_open:  # open end
                    date_filter = [
                        or_(Item.start_date >= time_start,
                            Item.end_date >= time_start)
                    ]
                else:  # closed range: any overlap with [start, end]
                    date_filter = [
                        or_(
                            and_(Item.start_date >= time_start,
                                 Item.start_date <= time_end),
                            and_(Item.end_date >= time_start,
                                 Item.end_date <= time_end),
                            and_(Item.start_date < time_start,
                                 Item.end_date > time_end),
                        )
                    ]
            else:
                # Single timestamp: the item's interval must contain it.
                date_filter = [
                    and_(Item.start_date <= datetime,
                         Item.end_date >= datetime)
                ]
            where += date_filter

    outer = [Item.tile_id == Tile.id]
    query = session.query(*columns).outerjoin(
        Tile, *outer).filter(*where).order_by(Item.start_date.desc(),
                                              Item.id)
    result = query.paginate(page=int(page),
                            per_page=int(limit),
                            error_out=False,
                            max_per_page=BDC_STAC_MAX_LIMIT)
    return result
def pg_geometry(feature):
    """Build a validated, SRID-4326 MultiPolygon expression from the
    geometry of a GeoJSON *feature*.
    """
    raw_geom = func.ST_GeomFromGeoJSON(dumps(feature["geometry"]))
    # Force multi-geometry, repair invalid rings, then stamp the SRID.
    return func.ST_SetSRID(func.ST_MakeValid(func.ST_Multi(raw_geom)), 4326)
def make_query(table, raw_query_params):
    """Translate raw request parameters into SQLAlchemy filter clauses
    for *table*.

    :param table: SQLAlchemy Table to filter.
    :param raw_query_params: mapping of '<field>__<operator>' keys to
        raw string values.
    :return: tuple ``(valid_query, query_clauses, resp, status_code)``.

    Python 3 compatibility fixes:
    - the operator lookup used ``filter(lambda ...)[0]``, which raises
      ``TypeError`` on Python 3 where ``filter`` returns an iterator; a
      subscriptable list comprehension is used instead (``IndexError``
      still signals an unknown operator).
    - ``raw_query_params.keys()`` is materialised with ``list()`` so
      ``.remove()`` works on a dict view.
    """
    table_keys = table.columns.keys()
    args_keys = list(raw_query_params.keys())
    resp = {
        'meta': {
            'status': 'error',
            'message': '',
        },
        'objects': [],
    }
    status_code = 200
    query_clauses = []
    valid_query = True

    # These parameters are consumed elsewhere (pagination, ordering,
    # weather join) and are not column filters.
    for reserved in ('offset', 'limit', 'order_by', 'weather'):
        if reserved in args_keys:
            args_keys.remove(reserved)

    for query_param in args_keys:
        try:
            field, operator = query_param.split('__')
        except ValueError:
            field = query_param
            operator = 'eq'
        query_value = raw_query_params.get(query_param)
        column = table.columns.get(field)

        if field not in table_keys:
            resp['meta']['message'] = '"%s" is not a valid fieldname' % field
            status_code = 400
            valid_query = False
        elif operator == 'in':
            query = column.in_(query_value.split(','))
            query_clauses.append(query)
        elif operator == 'within':
            geo = json.loads(query_value)
            # Accept a FeatureCollection, a Feature, or a bare geometry.
            if 'features' in geo.keys():
                val = geo['features'][0]['geometry']
            elif 'geometry' in geo.keys():
                val = geo['geometry']
            else:
                val = geo
            if val['type'] == 'LineString':
                # Buffer lines into polygons so ST_Within is meaningful.
                shape = asShape(val)
                lat = shape.centroid.y
                # 100 meters by default
                x, y = getSizeInDegrees(100, lat)
                val = shape.buffer(y).__geo_interface__
            val['crs'] = {"type": "name",
                          "properties": {"name": "EPSG:4326"}}
            query = column.ST_Within(
                func.ST_GeomFromGeoJSON(json.dumps(val)))
            query_clauses.append(query)
        elif operator.startswith('time_of_day'):
            if operator.endswith('ge'):
                query = func.date_part('hour', column).__ge__(query_value)
            elif operator.endswith('le'):
                query = func.date_part('hour', column).__le__(query_value)
            query_clauses.append(query)
        else:
            try:
                # Map 'eq' -> the comparator actually defined on the
                # column ('eq', 'eq_' or '__eq__').
                attr = [tmpl for tmpl in ['%s', '%s_', '__%s__']
                        if hasattr(column, tmpl % operator)][0] % operator
            except IndexError:
                resp['meta'][
                    'message'] = '"%s" is not a valid query operator' % operator
                status_code = 400
                valid_query = False
                break
            if query_value == 'null':  # pragma: no cover
                query_value = None
            query = getattr(column, attr)(query_value)
            query_clauses.append(query)

    return valid_query, query_clauses, resp, status_code
# Load geojson from file to dictionary.
# Fixes: the file is opened with a context manager (the handle was never
# closed) and the name no longer shadows the legacy 'file' builtin;
# the pointless '{}'.format(...) wrapper around the path is dropped.
with open(filepath_square, 'r') as square_file:
    square_geojson = geojson.loads(square_file.read())

# Get relevant variables. Convert all geometry types to multipolygon
# As it is easier to have just one case in the database.
square_geojson_geometry = square_geojson['geometry']
if square_geojson_geometry['type'] == "Polygon":
    square_geojson_geometry['type'] = "MultiPolygon"
    square_geojson_geometry['coordinates'] = [
        square_geojson_geometry['coordinates']
    ]
square_geojson_geometry_str = json.dumps(square_geojson_geometry)

# Append dictionary of variables to values. Some names change!
values.append({
    'code': square_geojson['properties']['code'],
    'region_id': square_geojson['properties']['region'],
    'province_id': square_geojson['properties']['province'],
    'commune_id': square_geojson['properties']['commune'],
    'mpoly': func.ST_GeomFromGeoJSON(square_geojson_geometry_str)
})

# Do the SQL
if verbose:
    print('Inserting data (SQL)')
stmt = sql_square_table.insert().values(values)
sql_connection.execute(stmt)
def make_query(table, raw_query_params):
    """Translate raw request parameters into SQLAlchemy filter clauses
    for *table*.

    Returns a tuple ``(valid_query, query_clauses, resp, status_code)``.
    """
    table_keys = list(table.columns.keys())
    args_keys = list(raw_query_params.keys())
    resp = {
        'meta': {
            'status': 'error',
            'message': '',
        },
        'objects': [],
    }
    status_code = 200
    query_clauses = []
    valid_query = True

    # Parameters consumed elsewhere (pagination, ordering, weather join).
    for reserved in ('offset', 'limit', 'order_by', 'weather'):
        if reserved in args_keys:
            args_keys.remove(reserved)

    for query_param in args_keys:
        try:
            field, operator = query_param.split('__')
        except ValueError:
            field, operator = query_param, 'eq'
        query = None
        query_value = raw_query_params.get(query_param)
        column = table.columns.get(field)

        if field not in table_keys:
            resp['meta']['message'] = '{!r} is not a valid field name'.format(
                field)
            status_code = 400
            valid_query = False
        elif operator == 'in':
            query_clauses.append(column.in_(query_value.split(',')))
        elif operator == 'within':
            geo = json.loads(query_value)
            # Accept a FeatureCollection, a Feature, or a bare geometry.
            if 'features' in list(geo.keys()):
                val = geo['features'][0]['geometry']
            elif 'geometry' in list(geo.keys()):
                val = geo['geometry']
            else:
                val = geo
            if val['type'] == 'LineString':
                # Buffer lines into polygons so ST_Within is meaningful.
                shape = shapely.geometry.asShape(val)
                lat = shape.centroid.y
                # 100 meters by default
                x, y = get_size_in_degrees(100, lat)
                val = shape.buffer(y).__geo_interface__
            val['crs'] = {'type': 'name',
                          'properties': {'name': 'EPSG:4326'}}
            query_clauses.append(
                column.ST_Within(func.ST_GeomFromGeoJSON(json.dumps(val))))
        elif operator.startswith('time_of_day'):
            if operator.endswith('ge'):
                query = func.date_part('hour', column).__ge__(query_value)
            elif operator.endswith('le'):
                query = func.date_part('hour', column).__le__(query_value)
            query_clauses.append(query)
        else:
            # Map 'eq' -> the comparator actually defined on the column
            # ('eq', 'eq_' or '__eq__'); no match means a bad operator.
            matching = [tmpl for tmpl in ['%s', '%s_', '__%s__']
                        if hasattr(column, tmpl % operator)]
            if not matching:
                msg = '{!r} is not a valid query operator'
                resp['meta']['message'] = msg.format(operator)
                status_code = 400
                valid_query = False
                break
            attr = matching[0] % operator
            if query_value == 'null':  # pragma: no cover
                query_value = None
            query_clauses.append(getattr(column, attr)(query_value))

    return valid_query, query_clauses, resp, status_code
def geojson_to_geo(self, geometry: Dict) -> object:
    """Convert a GeoJSON dict into a PostGIS geometry stamped with
    SRID 4326, evaluated through the session.
    """
    row = self.session.query(
        func.ST_SetSRID(func.ST_GeomFromGeoJSON(json.dumps(geometry)),
                        4326).label('geom')).one()
    return row.geom
def get_collection_items(
    collection_id=None,
    roles=None,
    item_id=None,
    bbox=None,
    datetime=None,
    ids=None,
    collections=None,
    intersects=None,
    page=1,
    limit=10,
    query=None,
    **kwargs,
) -> Pagination:
    """Retrieve a list of collection items based on filters.

    :param collection_id: Single Collection ID to include in the search for
        items. Only Items in one of the provided Collection will be
        searched, defaults to None
    :type collection_id: str, optional
    :param roles: caller's access roles ("<collection_id>:..."), defaults
        to None
    :type roles: list, optional
    :param item_id: item identifier, defaults to None
    :type item_id: str, optional
    :param bbox: bounding box for intersection [west, north, east, south],
        defaults to None
    :type bbox: list, optional
    :param datetime: Single date+time, or a range ('/' seperator),
        formatted to RFC 3339, section 5.6. Use double dots '..' for open
        date ranges, defaults to None. If the start or end date of an image
        generated by a temporal composition intersects the given datetime
        or range it will be included in the result.
    :type datetime: str, optional
    :param ids: Array of Item ids to return. All other filter parameters
        that further restrict the number of search results are ignored,
        defaults to None
    :type ids: list, optional
    :param collections: Array of Collection IDs to include in the search
        for items. Only Items in one of the provided Collections will be
        searched, defaults to None
    :type collections: list, optional
    :param intersects: Searches items by performing intersection between
        their geometry and provided GeoJSON geometry. All GeoJSON geometry
        types must be supported., defaults to None
    :type intersects: dict, optional
    :param page: The page offset of results, defaults to 1
    :type page: int, optional
    :param limit: The maximum number of results to return (page size),
        defaults to 10
    :type limit: int, optional
    :return: list of collectio items
    :rtype: list
    """
    # Output columns: collection identity ("name-version"), item fields,
    # geometry as GeoJSON with its bounding coordinates, and tile name.
    columns = [
        func.concat(Collection.name, "-",
                    Collection.version).label("collection"),
        Collection.collection_type,
        Collection._metadata.label("meta"),
        Item._metadata.label("item_meta"),
        Item.name.label("item"),
        Item.id,
        Item.collection_id,
        Item.start_date.label("start"),
        Item.end_date.label("end"),
        Item.assets,
        Item.created,
        Item.updated,
        cast(Item.cloud_cover, Float).label("cloud_cover"),
        func.ST_AsGeoJSON(Item.geom).label("geom"),
        func.ST_XMin(Item.geom).label("xmin"),
        func.ST_XMax(Item.geom).label("xmax"),
        func.ST_YMin(Item.geom).label("ymin"),
        func.ST_YMax(Item.geom).label("ymax"),
        Tile.name.label("tile"),
    ]

    if roles is None:
        roles = []

    # Base restriction: join items to their collection, and hide
    # non-public collections the caller has no role for.
    where = [
        Collection.id == Item.collection_id,
        or_(Collection.is_public.is_(True),
            Collection.id.in_([int(r.split(":")[0]) for r in roles])),
    ]

    # Same visibility restriction for the collection pre-query below.
    collections_where = _where_collections(collection_id, collections)
    collections_where.append(
        or_(Collection.is_public.is_(True),
            Collection.id.in_([int(r.split(":")[0]) for r in roles])))

    outer_join = [(Tile, [Item.tile_id == Tile.id])]
    _geom_tables = []
    _collections = Collection.query().filter(*collections_where).all()

    # For spatial filters, collect the native grid geometry tables of the
    # candidate collections so intersection can run against the grid.
    if bbox or intersects:
        grids = GridRefSys.query().filter(
            GridRefSys.id.in_(
                [c.grid_ref_sys_id for c in _collections])).all()
        for grid in grids:
            geom_table = grid.geom_table
            if geom_table is None:
                continue
            _geom_tables.append(geom_table)

    # NOTE(review): block nesting below reconstructed from a flattened
    # source; per the docstring, 'ids' bypasses every other filter —
    # confirm against upstream.
    if ids is not None:
        if isinstance(ids, str):
            ids = ids.split(",")
        where += [Item.name.in_(ids)]
    else:
        where += _where_collections(collection_id, collections)

        if item_id is not None:
            where += [Item.name.like(item_id)]

        if query:
            filters = create_query_filter(query)
            if filters:
                where += filters

        if intersects is not None:
            # Intersect with native grid if there is
            geom_expr = func.ST_GeomFromGeoJSON(str(intersects))
            grids_where, joins = intersect_grids(geom_expr,
                                                 geom_tables=_geom_tables)
            where += grids_where
            outer_join += joins
        elif bbox is not None:
            try:
                if isinstance(bbox, str):
                    bbox = bbox.split(",")
                bbox = [float(x) for x in bbox]

                # Degenerate (zero-width/height) boxes are rejected.
                if bbox[0] == bbox[2] or bbox[1] == bbox[3]:
                    raise InvalidBoundingBoxError("")

                geom_expr = func.ST_MakeEnvelope(bbox[0], bbox[1], bbox[2],
                                                 bbox[3],
                                                 func.ST_SRID(Item.geom))
                grid_where, joins = intersect_grids(
                    geom_expr, geom_tables=_geom_tables)
                where += grid_where
                outer_join += joins
            except (ValueError, InvalidBoundingBoxError) as e:
                abort(400, f"'{bbox}' is not a valid bbox.")

        if datetime is not None:
            if "/" in datetime:
                # ".." or "" marks an open-ended side of the range.
                matches_open = ("..", "")
                time_start, time_end = datetime.split("/")
                if time_start in matches_open:  # open start
                    date_filter = [
                        or_(Item.start_date <= time_end,
                            Item.end_date <= time_end)
                    ]
                elif time_end in matches_open:  # open end
                    date_filter = [
                        or_(Item.start_date >= time_start,
                            Item.end_date >= time_start)
                    ]
                else:  # closed range: any overlap with [start, end]
                    date_filter = [
                        or_(
                            and_(Item.start_date >= time_start,
                                 Item.start_date <= time_end),
                            and_(Item.end_date >= time_start,
                                 Item.end_date <= time_end),
                            and_(Item.start_date < time_start,
                                 Item.end_date > time_end),
                        )
                    ]
            else:
                # Single timestamp: item interval must contain it.
                date_filter = [
                    and_(Item.start_date <= datetime,
                         Item.end_date >= datetime)
                ]
            where += date_filter

    query = session.query(*columns)
    for entity, join_conditions in outer_join:
        query = query.outerjoin(entity, *join_conditions)

    try:
        query = query.filter(*where).order_by(Item.start_date.desc(),
                                              Item.id)
        result = query.paginate(page=int(page),
                                per_page=int(limit),
                                error_out=False,
                                max_per_page=BDC_STAC_MAX_LIMIT)
        return result
    except Exception as err:
        # Surface the underlying DBAPI error message when available.
        msg = str(err)
        if hasattr(err, "orig"):
            msg = str(err.orig)
        abort(400, msg.rstrip())
def within_geojson(network: NetworkMeta, geojson: str):
    """Return a query for the nodes of *network* whose location lies
    within the given GeoJSON geometry.
    """
    boundary = sqla_fn.ST_GeomFromGeoJSON(geojson)
    return (NodeMeta.query
            .filter(NodeMeta.location.ST_Within(boundary))
            .filter(NodeMeta.sensor_network == network.name))
from geonature.utils.env import db from geonature.utils.config import config from geonature.core.ref_geo.models import BibAreasTypes, LiMunicipalities, LAreas from utils_flask_sqla.response import json_resp routes = Blueprint("ref_geo", __name__) altitude_stmt = sa.select([ sa.column('altitude_min'), sa.column('altitude_max'), ]).select_from( func.ref_geo.fct_get_altitude_intersection( func.ST_SetSRID( func.ST_GeomFromGeoJSON(sa.bindparam('geojson')), 4326, ), )) geojson_intersect_filter = func.ST_Intersects( LAreas.geom, func.ST_Transform( func.ST_SetSRID(func.ST_GeomFromGeoJSON(sa.bindparam('geojson')), 4326), config['LOCAL_SRID'], ), ) area_size_func = func.ST_Area( func.ST_Transform( func.ST_SetSrid(